From 36907794aba28359da8c40fb59f161f0255eadf5 Mon Sep 17 00:00:00 2001 From: wangwl Date: Wed, 17 Dec 2025 09:05:58 +0000 Subject: [PATCH 1/3] add ResNetV1bV1_5 --- .../ResNetV1bV1_5/ResNet-PyTorch/LICENSE | 201 +++++ .../ResNetV1bV1_5/ResNet-PyTorch/README.md | 155 ++++ .../ResNetV1bV1_5/ResNet-PyTorch/config.py | 89 +++ .../data/ImageNet_1K_labels_map.txt | 1 + .../ResNet-PyTorch/data/README.md | 43 ++ .../ResNetV1bV1_5/ResNet-PyTorch/dataset.py | 230 ++++++ .../ResNet-PyTorch/figure/n01440764_36.JPEG | Bin 0 -> 35135 bytes .../ResNetV1bV1_5/ResNet-PyTorch/imgproc.py | 253 +++++++ .../ResNetV1bV1_5/ResNet-PyTorch/inference.py | 125 ++++ .../ResNetV1bV1_5/ResNet-PyTorch/model.py | 252 +++++++ .../ResNet-PyTorch/requirements.txt | 5 + .../scripts/preprocess_imagenet.sh | 34 + .../scripts/preprocess_mini_imagenet.py | 72 ++ .../ResNetV1bV1_5/ResNet-PyTorch/test.py | 127 ++++ .../ResNetV1bV1_5/ResNet-PyTorch/train.py | 350 +++++++++ .../ResNetV1bV1_5/ResNet-PyTorch/utils.py | 198 +++++ .../Classification/ResNetV1bV1_5/coverage.txt | 3 + .../Classification/ResNetV1bV1_5/resnet.py | 280 +++++++ .../ResNetV1bV1_5/resnet_loss.jpg | Bin 0 -> 35594 bytes .../ResNetV1bV1_5/resnet_loss.txt | 29 + .../ResNetV1bV1_5/weloTrainStep.py | 692 ++++++++++++++++++ 21 files changed, 3139 insertions(+) create mode 100644 PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/LICENSE create mode 100644 PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/README.md create mode 100644 PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/config.py create mode 100644 PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/data/ImageNet_1K_labels_map.txt create mode 100644 PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/data/README.md create mode 100644 PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/dataset.py create mode 100644 PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/figure/n01440764_36.JPEG create mode 100644 
PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/imgproc.py create mode 100644 PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/inference.py create mode 100644 PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/model.py create mode 100644 PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/requirements.txt create mode 100644 PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/scripts/preprocess_imagenet.sh create mode 100644 PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/scripts/preprocess_mini_imagenet.py create mode 100644 PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/test.py create mode 100644 PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/train.py create mode 100644 PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/utils.py create mode 100644 PyTorch/build-in/Classification/ResNetV1bV1_5/coverage.txt create mode 100644 PyTorch/build-in/Classification/ResNetV1bV1_5/resnet.py create mode 100644 PyTorch/build-in/Classification/ResNetV1bV1_5/resnet_loss.jpg create mode 100644 PyTorch/build-in/Classification/ResNetV1bV1_5/resnet_loss.txt create mode 100644 PyTorch/build-in/Classification/ResNetV1bV1_5/weloTrainStep.py diff --git a/PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/LICENSE b/PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/LICENSE new file mode 100644 index 000000000..deeea2d8c --- /dev/null +++ b/PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/LICENSE @@ -0,0 +1,201 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/README.md b/PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/README.md new file mode 100644 index 000000000..8ac2056a5 --- /dev/null +++ b/PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/README.md @@ -0,0 +1,155 @@ +# ResNet-PyTorch + +## Overview + +This repository contains an op-for-op PyTorch reimplementation +of [Deep Residual Learning for Image Recognition](https://arxiv.org/pdf/1512.03385v1.pdf). 
+ +## Table of contents + +- [ResNet-PyTorch](#resnet-pytorch) + - [Overview](#overview) + - [Table of contents](#table-of-contents) + - [Download weights](#download-weights) + - [Download datasets](#download-datasets) + - [How Test and Train](#how-test-and-train) + - [Test](#test) + - [Train model](#train-model) + - [Resume train model](#resume-train-model) + - [Result](#result) + - [Contributing](#contributing) + - [Credit](#credit) + - [Deep Residual Learning for Image Recognition](#deep-residual-learning-for-image-recognition) + +## Download weights + +- [Google Driver](https://drive.google.com/drive/folders/17ju2HN7Y6pyPK2CC_AqnAfTOe9_3hCQ8?usp=sharing) +- [Baidu Driver](https://pan.baidu.com/s/1yNs4rqIb004-NKEdKBJtYg?pwd=llot) + +## Download datasets + +Contains MNIST, CIFAR10&CIFAR100, TinyImageNet_200, MiniImageNet_1K, ImageNet_1K, Caltech101&Caltech256 and more etc. + +- [Google Driver](https://drive.google.com/drive/folders/1f-NSpZc07Qlzhgi6EbBEI1wTkN1MxPbQ?usp=sharing) +- [Baidu Driver](https://pan.baidu.com/s/1arNM38vhDT7p4jKeD4sqwA?pwd=llot) + +Please refer to `README.md` in the `data` directory for the method of making a dataset. + +## How Test and Train + +Both training and testing only need to modify the `config.py` file. + +### Test + +- line 29: `model_arch_name` change to `resnet18`. +- line 31: `model_mean_parameters` change to `[0.485, 0.456, 0.406]`. +- line 32: `model_std_parameters` change to `[0.229, 0.224, 0.225]`. +- line 34: `model_num_classes` change to `1000`. +- line 36: `mode` change to `test`. +- line 89: `model_weights_path` change to `./results/pretrained_models/ResNet18-ImageNet_1K-57bb63e.pth.tar`. + +```bash +python3 test.py +``` + +### Train model + +- line 29: `model_arch_name` change to `resnet18`. +- line 31: `model_mean_parameters` change to `[0.485, 0.456, 0.406]`. +- line 32: `model_std_parameters` change to `[0.229, 0.224, 0.225]`. +- line 34: `model_num_classes` change to `1000`. +- line 36: `mode` change to `train`. 
+- line 50: `pretrained_model_weights_path` change to `./results/pretrained_models/ResNet18-ImageNet_1K-57bb63e.pth.tar`. + +```bash +python3 train.py +``` + +### Resume train model + +- line 29: `model_arch_name` change to `resnet18`. +- line 31: `model_mean_parameters` change to `[0.485, 0.456, 0.406]`. +- line 32: `model_std_parameters` change to `[0.229, 0.224, 0.225]`. +- line 34: `model_num_classes` change to `1000`. +- line 36: `mode` change to `train`. +- line 53: `resume` change to `./samples/resnet18-ImageNet_1K/epoch_xxx.pth.tar`. + +```bash +python3 train.py +``` + +## Result + +Source of original paper results: [https://arxiv.org/pdf/1512.03385v1.pdf](https://arxiv.org/pdf/1512.03385v1.pdf) + +In the following table, the top-x error value in `()` indicates the result of the project, and `-` indicates no test. + +| Model | Dataset | Top-1 error (val) | Top-5 error (val) | +|:---------:|:-----------:|:------------------:|:-----------------:| +| resnet18 | ImageNet_1K | 27.88%(**30.25%**) | -(**10.93%**) | +| resnet34 | ImageNet_1K | 25.03%(**26.71%**) | 7.76%(**8.58%**) | +| resnet50 | ImageNet_1K | 22.85%(**19.65%**) | 6.71%(**4.87%**) | +| resnet101 | ImageNet_1K | 21.75%(**18.33%**) | 6.05%(**4.34%**) | +| resnet152 | ImageNet_1K | 21.43%(**17.66%**) | 5.71%(**4.08%**) | + +```bash +# Download `ResNet18-ImageNet_1K-57bb63e.pth.tar` weights to `./results/pretrained_models` +# More detail see `README.md` +python3 ./inference.py +``` + +Input: + + + +Output: + +```text +Build `resnet18` model successfully. +Load `resnet18` model weights `/ResNet-PyTorch/results/pretrained_models/ResNet18-ImageNet_1K-57bb63e.pth.tar` successfully. 
+tench, Tinca tinca (91.46%) +barracouta, snoek (7.15%) +gar, garfish, garpike, billfish, Lepisosteus osseus (0.43%) +coho, cohoe, coho salmon, blue jack, silver salmon, Oncorhynchus kisutch (0.27%) +platypus, duckbill, duckbilled platypus, duck-billed platypus, Ornithorhynchus anatinus (0.21%) +``` + +## Contributing + +If you find a bug, create a GitHub issue, or even better, submit a pull request. Similarly, if you have questions, +simply post them as GitHub issues. + +I look forward to seeing what the community does with these models! + +### Credit + +#### Deep Residual Learning for Image Recognition + +*Kaiming He, Xiangyu Zhang, Shaoqing Ren, Jian Sun* + +##### Abstract + +Deeper neural networks are more difficult to train. We present a residual learning framework to ease the training of +networks that are substantially deeper than those used previously. We explicitly reformulate the layers as learning +residual functions with reference to the layer inputs, instead of learning unreferenced functions. We provide +comprehensive empirical evidence showing that these residual networks are easier to optimize, and can gain accuracy from +considerably increased depth. On the ImageNet dataset we evaluate residual nets with a depth of up to 152 layers---8x +deeper than VGG nets but still having lower complexity. An ensemble of these residual nets achieves 3.57% error on the +ImageNet test set. This result won the 1st place on the ILSVRC 2015 classification task. We also present analysis on +CIFAR-10 with 100 and 1000 layers. +The depth of representations is of central importance for many visual recognition tasks. Solely due to our extremely +deep representations, we obtain a 28% relative improvement on the COCO object detection dataset. Deep residual nets are +foundations of our submissions to ILSVRC & COCO 2015 competitions, where we also won the 1st places on the tasks of +ImageNet detection, ImageNet localization, COCO detection, and COCO segmentation. 
+ +[[Paper]](https://arxiv.org/pdf/1512.03385v1.pdf) + +```bibtex +@inproceedings{he2016deep, + title={Deep residual learning for image recognition}, + author={He, Kaiming and Zhang, Xiangyu and Ren, Shaoqing and Sun, Jian}, + booktitle={Proceedings of the IEEE conference on computer vision and pattern recognition}, + pages={770--778}, + year={2016} +} +``` \ No newline at end of file diff --git a/PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/config.py b/PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/config.py new file mode 100644 index 000000000..49617d1af --- /dev/null +++ b/PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/config.py @@ -0,0 +1,89 @@ +# Copyright 2022 Dakewe Biotech Corporation. All Rights Reserved. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================== +import random + +import numpy as np +import torch +from torch.backends import cudnn + +# Random seed to maintain reproducible results +random.seed(0) +torch.manual_seed(0) +np.random.seed(0) +# Use GPU for training by default +device = torch.device("cuda", 0) +# Turning on when the image size does not change during training can speed up training +cudnn.benchmark = True +# Model arch name +model_arch_name = "resnet18" +# Model normalization parameters +model_mean_parameters = [0.485, 0.456, 0.406] +model_std_parameters = [0.229, 0.224, 0.225] +# Model number class +model_num_classes = 1000 +# Current configuration parameter method +mode = "train" +# Experiment name, easy to save weights and log files +exp_name = f"{model_arch_name}-ImageNet_1K" + +if mode == "train": + # Dataset address + train_image_dir = "./data/ImageNet_1K/ILSVRC2012_img_train" + valid_image_dir = "./data/ImageNet_1K/ILSVRC2012_img_val" + + image_size = 224 + batch_size = 128 + num_workers = 4 + + # The address to load the pretrained model + pretrained_model_weights_path = "./results/pretrained_models/ResNet18-ImageNet_1K-57bb63e.pth.tar" + + # Incremental training and migration training + resume = "" + + # Total num epochs + epochs = 600 + + # Loss parameters + loss_label_smoothing = 0.1 + loss_weights = 1.0 + + # Optimizer parameter + model_lr = 0.1 + model_momentum = 0.9 + model_weight_decay = 2e-05 + model_ema_decay = 0.99998 + + # Learning rate scheduler parameter + lr_scheduler_T_0 = epochs // 4 + lr_scheduler_T_mult = 1 + lr_scheduler_eta_min = 5e-5 + + # How many iterations to print the training/validate result + train_print_frequency = 200 + valid_print_frequency = 20 + +if mode == "test": + # Test data address + test_image_dir = "./data/ImageNet_1K/ILSVRC2012_img_val" + + # Test dataloader parameters + image_size = 224 + batch_size = 256 + num_workers = 4 + + # How many iterations to print the testing 
result + test_print_frequency = 20 + + model_weights_path = "./results/pretrained_models/ResNet18-ImageNet_1K-57bb63e.pth.tar" diff --git a/PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/data/ImageNet_1K_labels_map.txt b/PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/data/ImageNet_1K_labels_map.txt new file mode 100644 index 000000000..ae7d9a9cb --- /dev/null +++ b/PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/data/ImageNet_1K_labels_map.txt @@ -0,0 +1 @@ +{"0": "tench, Tinca tinca", "1": "goldfish, Carassius auratus", "2": "great white shark, white shark, man-eater, man-eating shark, Carcharodon carcharias", "3": "tiger shark, Galeocerdo cuvieri", "4": "hammerhead, hammerhead shark", "5": "electric ray, crampfish, numbfish, torpedo", "6": "stingray", "7": "cock", "8": "hen", "9": "ostrich, Struthio camelus", "10": "brambling, Fringilla montifringilla", "11": "goldfinch, Carduelis carduelis", "12": "house finch, linnet, Carpodacus mexicanus", "13": "junco, snowbird", "14": "indigo bunting, indigo finch, indigo bird, Passerina cyanea", "15": "robin, American robin, Turdus migratorius", "16": "bulbul", "17": "jay", "18": "magpie", "19": "chickadee", "20": "water ouzel, dipper", "21": "kite", "22": "bald eagle, American eagle, Haliaeetus leucocephalus", "23": "vulture", "24": "great grey owl, great gray owl, Strix nebulosa", "25": "European fire salamander, Salamandra salamandra", "26": "common newt, Triturus vulgaris", "27": "eft", "28": "spotted salamander, Ambystoma maculatum", "29": "axolotl, mud puppy, Ambystoma mexicanum", "30": "bullfrog, Rana catesbeiana", "31": "tree frog, tree-frog", "32": "tailed frog, bell toad, ribbed toad, tailed toad, Ascaphus trui", "33": "loggerhead, loggerhead turtle, Caretta caretta", "34": "leatherback turtle, leatherback, leathery turtle, Dermochelys coriacea", "35": "mud turtle", "36": "terrapin", "37": "box turtle, box tortoise", "38": "banded gecko", "39": "common iguana, iguana, 
Iguana iguana", "40": "American chameleon, anole, Anolis carolinensis", "41": "whiptail, whiptail lizard", "42": "agama", "43": "frilled lizard, Chlamydosaurus kingi", "44": "alligator lizard", "45": "Gila monster, Heloderma suspectum", "46": "green lizard, Lacerta viridis", "47": "African chameleon, Chamaeleo chamaeleon", "48": "Komodo dragon, Komodo lizard, dragon lizard, giant lizard, Varanus komodoensis", "49": "African crocodile, Nile crocodile, Crocodylus niloticus", "50": "American alligator, Alligator mississipiensis", "51": "triceratops", "52": "thunder snake, worm snake, Carphophis amoenus", "53": "ringneck snake, ring-necked snake, ring snake", "54": "hognose snake, puff adder, sand viper", "55": "green snake, grass snake", "56": "king snake, kingsnake", "57": "garter snake, grass snake", "58": "water snake", "59": "vine snake", "60": "night snake, Hypsiglena torquata", "61": "boa constrictor, Constrictor constrictor", "62": "rock python, rock snake, Python sebae", "63": "Indian cobra, Naja naja", "64": "green mamba", "65": "sea snake", "66": "horned viper, cerastes, sand viper, horned asp, Cerastes cornutus", "67": "diamondback, diamondback rattlesnake, Crotalus adamanteus", "68": "sidewinder, horned rattlesnake, Crotalus cerastes", "69": "trilobite", "70": "harvestman, daddy longlegs, Phalangium opilio", "71": "scorpion", "72": "black and gold garden spider, Argiope aurantia", "73": "barn spider, Araneus cavaticus", "74": "garden spider, Aranea diademata", "75": "black widow, Latrodectus mactans", "76": "tarantula", "77": "wolf spider, hunting spider", "78": "tick", "79": "centipede", "80": "black grouse", "81": "ptarmigan", "82": "ruffed grouse, partridge, Bonasa umbellus", "83": "prairie chicken, prairie grouse, prairie fowl", "84": "peacock", "85": "quail", "86": "partridge", "87": "African grey, African gray, Psittacus erithacus", "88": "macaw", "89": "sulphur-crested cockatoo, Kakatoe galerita, Cacatua galerita", "90": "lorikeet", "91": "coucal", 
"92": "bee eater", "93": "hornbill", "94": "hummingbird", "95": "jacamar", "96": "toucan", "97": "drake", "98": "red-breasted merganser, Mergus serrator", "99": "goose", "100": "black swan, Cygnus atratus", "101": "tusker", "102": "echidna, spiny anteater, anteater", "103": "platypus, duckbill, duckbilled platypus, duck-billed platypus, Ornithorhynchus anatinus", "104": "wallaby, brush kangaroo", "105": "koala, koala bear, kangaroo bear, native bear, Phascolarctos cinereus", "106": "wombat", "107": "jellyfish", "108": "sea anemone, anemone", "109": "brain coral", "110": "flatworm, platyhelminth", "111": "nematode, nematode worm, roundworm", "112": "conch", "113": "snail", "114": "slug", "115": "sea slug, nudibranch", "116": "chiton, coat-of-mail shell, sea cradle, polyplacophore", "117": "chambered nautilus, pearly nautilus, nautilus", "118": "Dungeness crab, Cancer magister", "119": "rock crab, Cancer irroratus", "120": "fiddler crab", "121": "king crab, Alaska crab, Alaskan king crab, Alaska king crab, Paralithodes camtschatica", "122": "American lobster, Northern lobster, Maine lobster, Homarus americanus", "123": "spiny lobster, langouste, rock lobster, crawfish, crayfish, sea crawfish", "124": "crayfish, crawfish, crawdad, crawdaddy", "125": "hermit crab", "126": "isopod", "127": "white stork, Ciconia ciconia", "128": "black stork, Ciconia nigra", "129": "spoonbill", "130": "flamingo", "131": "little blue heron, Egretta caerulea", "132": "American egret, great white heron, Egretta albus", "133": "bittern", "134": "crane", "135": "limpkin, Aramus pictus", "136": "European gallinule, Porphyrio porphyrio", "137": "American coot, marsh hen, mud hen, water hen, Fulica americana", "138": "bustard", "139": "ruddy turnstone, Arenaria interpres", "140": "red-backed sandpiper, dunlin, Erolia alpina", "141": "redshank, Tringa totanus", "142": "dowitcher", "143": "oystercatcher, oyster catcher", "144": "pelican", "145": "king penguin, Aptenodytes patagonica", "146": 
"albatross, mollymawk", "147": "grey whale, gray whale, devilfish, Eschrichtius gibbosus, Eschrichtius robustus", "148": "killer whale, killer, orca, grampus, sea wolf, Orcinus orca", "149": "dugong, Dugong dugon", "150": "sea lion", "151": "Chihuahua", "152": "Japanese spaniel", "153": "Maltese dog, Maltese terrier, Maltese", "154": "Pekinese, Pekingese, Peke", "155": "Shih-Tzu", "156": "Blenheim spaniel", "157": "papillon", "158": "toy terrier", "159": "Rhodesian ridgeback", "160": "Afghan hound, Afghan", "161": "basset, basset hound", "162": "beagle", "163": "bloodhound, sleuthhound", "164": "bluetick", "165": "black-and-tan coonhound", "166": "Walker hound, Walker foxhound", "167": "English foxhound", "168": "redbone", "169": "borzoi, Russian wolfhound", "170": "Irish wolfhound", "171": "Italian greyhound", "172": "whippet", "173": "Ibizan hound, Ibizan Podenco", "174": "Norwegian elkhound, elkhound", "175": "otterhound, otter hound", "176": "Saluki, gazelle hound", "177": "Scottish deerhound, deerhound", "178": "Weimaraner", "179": "Staffordshire bullterrier, Staffordshire bull terrier", "180": "American Staffordshire terrier, Staffordshire terrier, American pit bull terrier, pit bull terrier", "181": "Bedlington terrier", "182": "Border terrier", "183": "Kerry blue terrier", "184": "Irish terrier", "185": "Norfolk terrier", "186": "Norwich terrier", "187": "Yorkshire terrier", "188": "wire-haired fox terrier", "189": "Lakeland terrier", "190": "Sealyham terrier, Sealyham", "191": "Airedale, Airedale terrier", "192": "cairn, cairn terrier", "193": "Australian terrier", "194": "Dandie Dinmont, Dandie Dinmont terrier", "195": "Boston bull, Boston terrier", "196": "miniature schnauzer", "197": "giant schnauzer", "198": "standard schnauzer", "199": "Scotch terrier, Scottish terrier, Scottie", "200": "Tibetan terrier, chrysanthemum dog", "201": "silky terrier, Sydney silky", "202": "soft-coated wheaten terrier", "203": "West Highland white terrier", "204": "Lhasa, 
Lhasa apso", "205": "flat-coated retriever", "206": "curly-coated retriever", "207": "golden retriever", "208": "Labrador retriever", "209": "Chesapeake Bay retriever", "210": "German short-haired pointer", "211": "vizsla, Hungarian pointer", "212": "English setter", "213": "Irish setter, red setter", "214": "Gordon setter", "215": "Brittany spaniel", "216": "clumber, clumber spaniel", "217": "English springer, English springer spaniel", "218": "Welsh springer spaniel", "219": "cocker spaniel, English cocker spaniel, cocker", "220": "Sussex spaniel", "221": "Irish water spaniel", "222": "kuvasz", "223": "schipperke", "224": "groenendael", "225": "malinois", "226": "briard", "227": "kelpie", "228": "komondor", "229": "Old English sheepdog, bobtail", "230": "Shetland sheepdog, Shetland sheep dog, Shetland", "231": "collie", "232": "Border collie", "233": "Bouvier des Flandres, Bouviers des Flandres", "234": "Rottweiler", "235": "German shepherd, German shepherd dog, German police dog, alsatian", "236": "Doberman, Doberman pinscher", "237": "miniature pinscher", "238": "Greater Swiss Mountain dog", "239": "Bernese mountain dog", "240": "Appenzeller", "241": "EntleBucher", "242": "boxer", "243": "bull mastiff", "244": "Tibetan mastiff", "245": "French bulldog", "246": "Great Dane", "247": "Saint Bernard, St Bernard", "248": "Eskimo dog, husky", "249": "malamute, malemute, Alaskan malamute", "250": "Siberian husky", "251": "dalmatian, coach dog, carriage dog", "252": "affenpinscher, monkey pinscher, monkey dog", "253": "basenji", "254": "pug, pug-dog", "255": "Leonberg", "256": "Newfoundland, Newfoundland dog", "257": "Great Pyrenees", "258": "Samoyed, Samoyede", "259": "Pomeranian", "260": "chow, chow chow", "261": "keeshond", "262": "Brabancon griffon", "263": "Pembroke, Pembroke Welsh corgi", "264": "Cardigan, Cardigan Welsh corgi", "265": "toy poodle", "266": "miniature poodle", "267": "standard poodle", "268": "Mexican hairless", "269": "timber wolf, grey wolf, 
gray wolf, Canis lupus", "270": "white wolf, Arctic wolf, Canis lupus tundrarum", "271": "red wolf, maned wolf, Canis rufus, Canis niger", "272": "coyote, prairie wolf, brush wolf, Canis latrans", "273": "dingo, warrigal, warragal, Canis dingo", "274": "dhole, Cuon alpinus", "275": "African hunting dog, hyena dog, Cape hunting dog, Lycaon pictus", "276": "hyena, hyaena", "277": "red fox, Vulpes vulpes", "278": "kit fox, Vulpes macrotis", "279": "Arctic fox, white fox, Alopex lagopus", "280": "grey fox, gray fox, Urocyon cinereoargenteus", "281": "tabby, tabby cat", "282": "tiger cat", "283": "Persian cat", "284": "Siamese cat, Siamese", "285": "Egyptian cat", "286": "cougar, puma, catamount, mountain lion, painter, panther, Felis concolor", "287": "lynx, catamount", "288": "leopard, Panthera pardus", "289": "snow leopard, ounce, Panthera uncia", "290": "jaguar, panther, Panthera onca, Felis onca", "291": "lion, king of beasts, Panthera leo", "292": "tiger, Panthera tigris", "293": "cheetah, chetah, Acinonyx jubatus", "294": "brown bear, bruin, Ursus arctos", "295": "American black bear, black bear, Ursus americanus, Euarctos americanus", "296": "ice bear, polar bear, Ursus Maritimus, Thalarctos maritimus", "297": "sloth bear, Melursus ursinus, Ursus ursinus", "298": "mongoose", "299": "meerkat, mierkat", "300": "tiger beetle", "301": "ladybug, ladybeetle, lady beetle, ladybird, ladybird beetle", "302": "ground beetle, carabid beetle", "303": "long-horned beetle, longicorn, longicorn beetle", "304": "leaf beetle, chrysomelid", "305": "dung beetle", "306": "rhinoceros beetle", "307": "weevil", "308": "fly", "309": "bee", "310": "ant, emmet, pismire", "311": "grasshopper, hopper", "312": "cricket", "313": "walking stick, walkingstick, stick insect", "314": "cockroach, roach", "315": "mantis, mantid", "316": "cicada, cicala", "317": "leafhopper", "318": "lacewing, lacewing fly", "319": "dragonfly, darning needle, devil's darning needle, sewing needle, snake feeder, 
snake doctor, mosquito hawk, skeeter hawk", "320": "damselfly", "321": "admiral", "322": "ringlet, ringlet butterfly", "323": "monarch, monarch butterfly, milkweed butterfly, Danaus plexippus", "324": "cabbage butterfly", "325": "sulphur butterfly, sulfur butterfly", "326": "lycaenid, lycaenid butterfly", "327": "starfish, sea star", "328": "sea urchin", "329": "sea cucumber, holothurian", "330": "wood rabbit, cottontail, cottontail rabbit", "331": "hare", "332": "Angora, Angora rabbit", "333": "hamster", "334": "porcupine, hedgehog", "335": "fox squirrel, eastern fox squirrel, Sciurus niger", "336": "marmot", "337": "beaver", "338": "guinea pig, Cavia cobaya", "339": "sorrel", "340": "zebra", "341": "hog, pig, grunter, squealer, Sus scrofa", "342": "wild boar, boar, Sus scrofa", "343": "warthog", "344": "hippopotamus, hippo, river horse, Hippopotamus amphibius", "345": "ox", "346": "water buffalo, water ox, Asiatic buffalo, Bubalus bubalis", "347": "bison", "348": "ram, tup", "349": "bighorn, bighorn sheep, cimarron, Rocky Mountain bighorn, Rocky Mountain sheep, Ovis canadensis", "350": "ibex, Capra ibex", "351": "hartebeest", "352": "impala, Aepyceros melampus", "353": "gazelle", "354": "Arabian camel, dromedary, Camelus dromedarius", "355": "llama", "356": "weasel", "357": "mink", "358": "polecat, fitch, foulmart, foumart, Mustela putorius", "359": "black-footed ferret, ferret, Mustela nigripes", "360": "otter", "361": "skunk, polecat, wood pussy", "362": "badger", "363": "armadillo", "364": "three-toed sloth, ai, Bradypus tridactylus", "365": "orangutan, orang, orangutang, Pongo pygmaeus", "366": "gorilla, Gorilla gorilla", "367": "chimpanzee, chimp, Pan troglodytes", "368": "gibbon, Hylobates lar", "369": "siamang, Hylobates syndactylus, Symphalangus syndactylus", "370": "guenon, guenon monkey", "371": "patas, hussar monkey, Erythrocebus patas", "372": "baboon", "373": "macaque", "374": "langur", "375": "colobus, colobus monkey", "376": "proboscis monkey, 
Nasalis larvatus", "377": "marmoset", "378": "capuchin, ringtail, Cebus capucinus", "379": "howler monkey, howler", "380": "titi, titi monkey", "381": "spider monkey, Ateles geoffroyi", "382": "squirrel monkey, Saimiri sciureus", "383": "Madagascar cat, ring-tailed lemur, Lemur catta", "384": "indri, indris, Indri indri, Indri brevicaudatus", "385": "Indian elephant, Elephas maximus", "386": "African elephant, Loxodonta africana", "387": "lesser panda, red panda, panda, bear cat, cat bear, Ailurus fulgens", "388": "giant panda, panda, panda bear, coon bear, Ailuropoda melanoleuca", "389": "barracouta, snoek", "390": "eel", "391": "coho, cohoe, coho salmon, blue jack, silver salmon, Oncorhynchus kisutch", "392": "rock beauty, Holocanthus tricolor", "393": "anemone fish", "394": "sturgeon", "395": "gar, garfish, garpike, billfish, Lepisosteus osseus", "396": "lionfish", "397": "puffer, pufferfish, blowfish, globefish", "398": "abacus", "399": "abaya", "400": "academic gown, academic robe, judge's robe", "401": "accordion, piano accordion, squeeze box", "402": "acoustic guitar", "403": "aircraft carrier, carrier, flattop, attack aircraft carrier", "404": "airliner", "405": "airship, dirigible", "406": "altar", "407": "ambulance", "408": "amphibian, amphibious vehicle", "409": "analog clock", "410": "apiary, bee house", "411": "apron", "412": "ashcan, trash can, garbage can, wastebin, ash bin, ash-bin, ashbin, dustbin, trash barrel, trash bin", "413": "assault rifle, assault gun", "414": "backpack, back pack, knapsack, packsack, rucksack, haversack", "415": "bakery, bakeshop, bakehouse", "416": "balance beam, beam", "417": "balloon", "418": "ballpoint, ballpoint pen, ballpen, Biro", "419": "Band Aid", "420": "banjo", "421": "bannister, banister, balustrade, balusters, handrail", "422": "barbell", "423": "barber chair", "424": "barbershop", "425": "barn", "426": "barometer", "427": "barrel, cask", "428": "barrow, garden cart, lawn cart, wheelbarrow", "429": "baseball", 
"430": "basketball", "431": "bassinet", "432": "bassoon", "433": "bathing cap, swimming cap", "434": "bath towel", "435": "bathtub, bathing tub, bath, tub", "436": "beach wagon, station wagon, wagon, estate car, beach waggon, station waggon, waggon", "437": "beacon, lighthouse, beacon light, pharos", "438": "beaker", "439": "bearskin, busby, shako", "440": "beer bottle", "441": "beer glass", "442": "bell cote, bell cot", "443": "bib", "444": "bicycle-built-for-two, tandem bicycle, tandem", "445": "bikini, two-piece", "446": "binder, ring-binder", "447": "binoculars, field glasses, opera glasses", "448": "birdhouse", "449": "boathouse", "450": "bobsled, bobsleigh, bob", "451": "bolo tie, bolo, bola tie, bola", "452": "bonnet, poke bonnet", "453": "bookcase", "454": "bookshop, bookstore, bookstall", "455": "bottlecap", "456": "bow", "457": "bow tie, bow-tie, bowtie", "458": "brass, memorial tablet, plaque", "459": "brassiere, bra, bandeau", "460": "breakwater, groin, groyne, mole, bulwark, seawall, jetty", "461": "breastplate, aegis, egis", "462": "broom", "463": "bucket, pail", "464": "buckle", "465": "bulletproof vest", "466": "bullet train, bullet", "467": "butcher shop, meat market", "468": "cab, hack, taxi, taxicab", "469": "caldron, cauldron", "470": "candle, taper, wax light", "471": "cannon", "472": "canoe", "473": "can opener, tin opener", "474": "cardigan", "475": "car mirror", "476": "carousel, carrousel, merry-go-round, roundabout, whirligig", "477": "carpenter's kit, tool kit", "478": "carton", "479": "car wheel", "480": "cash machine, cash dispenser, automated teller machine, automatic teller machine, automated teller, automatic teller, ATM", "481": "cassette", "482": "cassette player", "483": "castle", "484": "catamaran", "485": "CD player", "486": "cello, violoncello", "487": "cellular telephone, cellular phone, cellphone, cell, mobile phone", "488": "chain", "489": "chainlink fence", "490": "chain mail, ring mail, mail, chain armor, chain armour, 
ring armor, ring armour", "491": "chain saw, chainsaw", "492": "chest", "493": "chiffonier, commode", "494": "chime, bell, gong", "495": "china cabinet, china closet", "496": "Christmas stocking", "497": "church, church building", "498": "cinema, movie theater, movie theatre, movie house, picture palace", "499": "cleaver, meat cleaver, chopper", "500": "cliff dwelling", "501": "cloak", "502": "clog, geta, patten, sabot", "503": "cocktail shaker", "504": "coffee mug", "505": "coffeepot", "506": "coil, spiral, volute, whorl, helix", "507": "combination lock", "508": "computer keyboard, keypad", "509": "confectionery, confectionary, candy store", "510": "container ship, containership, container vessel", "511": "convertible", "512": "corkscrew, bottle screw", "513": "cornet, horn, trumpet, trump", "514": "cowboy boot", "515": "cowboy hat, ten-gallon hat", "516": "cradle", "517": "crane", "518": "crash helmet", "519": "crate", "520": "crib, cot", "521": "Crock Pot", "522": "croquet ball", "523": "crutch", "524": "cuirass", "525": "dam, dike, dyke", "526": "desk", "527": "desktop computer", "528": "dial telephone, dial phone", "529": "diaper, nappy, napkin", "530": "digital clock", "531": "digital watch", "532": "dining table, board", "533": "dishrag, dishcloth", "534": "dishwasher, dish washer, dishwashing machine", "535": "disk brake, disc brake", "536": "dock, dockage, docking facility", "537": "dogsled, dog sled, dog sleigh", "538": "dome", "539": "doormat, welcome mat", "540": "drilling platform, offshore rig", "541": "drum, membranophone, tympan", "542": "drumstick", "543": "dumbbell", "544": "Dutch oven", "545": "electric fan, blower", "546": "electric guitar", "547": "electric locomotive", "548": "entertainment center", "549": "envelope", "550": "espresso maker", "551": "face powder", "552": "feather boa, boa", "553": "file, file cabinet, filing cabinet", "554": "fireboat", "555": "fire engine, fire truck", "556": "fire screen, fireguard", "557": "flagpole, 
flagstaff", "558": "flute, transverse flute", "559": "folding chair", "560": "football helmet", "561": "forklift", "562": "fountain", "563": "fountain pen", "564": "four-poster", "565": "freight car", "566": "French horn, horn", "567": "frying pan, frypan, skillet", "568": "fur coat", "569": "garbage truck, dustcart", "570": "gasmask, respirator, gas helmet", "571": "gas pump, gasoline pump, petrol pump, island dispenser", "572": "goblet", "573": "go-kart", "574": "golf ball", "575": "golfcart, golf cart", "576": "gondola", "577": "gong, tam-tam", "578": "gown", "579": "grand piano, grand", "580": "greenhouse, nursery, glasshouse", "581": "grille, radiator grille", "582": "grocery store, grocery, food market, market", "583": "guillotine", "584": "hair slide", "585": "hair spray", "586": "half track", "587": "hammer", "588": "hamper", "589": "hand blower, blow dryer, blow drier, hair dryer, hair drier", "590": "hand-held computer, hand-held microcomputer", "591": "handkerchief, hankie, hanky, hankey", "592": "hard disc, hard disk, fixed disk", "593": "harmonica, mouth organ, harp, mouth harp", "594": "harp", "595": "harvester, reaper", "596": "hatchet", "597": "holster", "598": "home theater, home theatre", "599": "honeycomb", "600": "hook, claw", "601": "hoopskirt, crinoline", "602": "horizontal bar, high bar", "603": "horse cart, horse-cart", "604": "hourglass", "605": "iPod", "606": "iron, smoothing iron", "607": "jack-o'-lantern", "608": "jean, blue jean, denim", "609": "jeep, landrover", "610": "jersey, T-shirt, tee shirt", "611": "jigsaw puzzle", "612": "jinrikisha, ricksha, rickshaw", "613": "joystick", "614": "kimono", "615": "knee pad", "616": "knot", "617": "lab coat, laboratory coat", "618": "ladle", "619": "lampshade, lamp shade", "620": "laptop, laptop computer", "621": "lawn mower, mower", "622": "lens cap, lens cover", "623": "letter opener, paper knife, paperknife", "624": "library", "625": "lifeboat", "626": "lighter, light, igniter, ignitor", 
"627": "limousine, limo", "628": "liner, ocean liner", "629": "lipstick, lip rouge", "630": "Loafer", "631": "lotion", "632": "loudspeaker, speaker, speaker unit, loudspeaker system, speaker system", "633": "loupe, jeweler's loupe", "634": "lumbermill, sawmill", "635": "magnetic compass", "636": "mailbag, postbag", "637": "mailbox, letter box", "638": "maillot", "639": "maillot, tank suit", "640": "manhole cover", "641": "maraca", "642": "marimba, xylophone", "643": "mask", "644": "matchstick", "645": "maypole", "646": "maze, labyrinth", "647": "measuring cup", "648": "medicine chest, medicine cabinet", "649": "megalith, megalithic structure", "650": "microphone, mike", "651": "microwave, microwave oven", "652": "military uniform", "653": "milk can", "654": "minibus", "655": "miniskirt, mini", "656": "minivan", "657": "missile", "658": "mitten", "659": "mixing bowl", "660": "mobile home, manufactured home", "661": "Model T", "662": "modem", "663": "monastery", "664": "monitor", "665": "moped", "666": "mortar", "667": "mortarboard", "668": "mosque", "669": "mosquito net", "670": "motor scooter, scooter", "671": "mountain bike, all-terrain bike, off-roader", "672": "mountain tent", "673": "mouse, computer mouse", "674": "mousetrap", "675": "moving van", "676": "muzzle", "677": "nail", "678": "neck brace", "679": "necklace", "680": "nipple", "681": "notebook, notebook computer", "682": "obelisk", "683": "oboe, hautboy, hautbois", "684": "ocarina, sweet potato", "685": "odometer, hodometer, mileometer, milometer", "686": "oil filter", "687": "organ, pipe organ", "688": "oscilloscope, scope, cathode-ray oscilloscope, CRO", "689": "overskirt", "690": "oxcart", "691": "oxygen mask", "692": "packet", "693": "paddle, boat paddle", "694": "paddlewheel, paddle wheel", "695": "padlock", "696": "paintbrush", "697": "pajama, pyjama, pj's, jammies", "698": "palace", "699": "panpipe, pandean pipe, syrinx", "700": "paper towel", "701": "parachute, chute", "702": "parallel bars, 
bars", "703": "park bench", "704": "parking meter", "705": "passenger car, coach, carriage", "706": "patio, terrace", "707": "pay-phone, pay-station", "708": "pedestal, plinth, footstall", "709": "pencil box, pencil case", "710": "pencil sharpener", "711": "perfume, essence", "712": "Petri dish", "713": "photocopier", "714": "pick, plectrum, plectron", "715": "pickelhaube", "716": "picket fence, paling", "717": "pickup, pickup truck", "718": "pier", "719": "piggy bank, penny bank", "720": "pill bottle", "721": "pillow", "722": "ping-pong ball", "723": "pinwheel", "724": "pirate, pirate ship", "725": "pitcher, ewer", "726": "plane, carpenter's plane, woodworking plane", "727": "planetarium", "728": "plastic bag", "729": "plate rack", "730": "plow, plough", "731": "plunger, plumber's helper", "732": "Polaroid camera, Polaroid Land camera", "733": "pole", "734": "police van, police wagon, paddy wagon, patrol wagon, wagon, black Maria", "735": "poncho", "736": "pool table, billiard table, snooker table", "737": "pop bottle, soda bottle", "738": "pot, flowerpot", "739": "potter's wheel", "740": "power drill", "741": "prayer rug, prayer mat", "742": "printer", "743": "prison, prison house", "744": "projectile, missile", "745": "projector", "746": "puck, hockey puck", "747": "punching bag, punch bag, punching ball, punchball", "748": "purse", "749": "quill, quill pen", "750": "quilt, comforter, comfort, puff", "751": "racer, race car, racing car", "752": "racket, racquet", "753": "radiator", "754": "radio, wireless", "755": "radio telescope, radio reflector", "756": "rain barrel", "757": "recreational vehicle, RV, R.V.", "758": "reel", "759": "reflex camera", "760": "refrigerator, icebox", "761": "remote control, remote", "762": "restaurant, eating house, eating place, eatery", "763": "revolver, six-gun, six-shooter", "764": "rifle", "765": "rocking chair, rocker", "766": "rotisserie", "767": "rubber eraser, rubber, pencil eraser", "768": "rugby ball", "769": "rule, 
ruler", "770": "running shoe", "771": "safe", "772": "safety pin", "773": "saltshaker, salt shaker", "774": "sandal", "775": "sarong", "776": "sax, saxophone", "777": "scabbard", "778": "scale, weighing machine", "779": "school bus", "780": "schooner", "781": "scoreboard", "782": "screen, CRT screen", "783": "screw", "784": "screwdriver", "785": "seat belt, seatbelt", "786": "sewing machine", "787": "shield, buckler", "788": "shoe shop, shoe-shop, shoe store", "789": "shoji", "790": "shopping basket", "791": "shopping cart", "792": "shovel", "793": "shower cap", "794": "shower curtain", "795": "ski", "796": "ski mask", "797": "sleeping bag", "798": "slide rule, slipstick", "799": "sliding door", "800": "slot, one-armed bandit", "801": "snorkel", "802": "snowmobile", "803": "snowplow, snowplough", "804": "soap dispenser", "805": "soccer ball", "806": "sock", "807": "solar dish, solar collector, solar furnace", "808": "sombrero", "809": "soup bowl", "810": "space bar", "811": "space heater", "812": "space shuttle", "813": "spatula", "814": "speedboat", "815": "spider web, spider's web", "816": "spindle", "817": "sports car, sport car", "818": "spotlight, spot", "819": "stage", "820": "steam locomotive", "821": "steel arch bridge", "822": "steel drum", "823": "stethoscope", "824": "stole", "825": "stone wall", "826": "stopwatch, stop watch", "827": "stove", "828": "strainer", "829": "streetcar, tram, tramcar, trolley, trolley car", "830": "stretcher", "831": "studio couch, day bed", "832": "stupa, tope", "833": "submarine, pigboat, sub, U-boat", "834": "suit, suit of clothes", "835": "sundial", "836": "sunglass", "837": "sunglasses, dark glasses, shades", "838": "sunscreen, sunblock, sun blocker", "839": "suspension bridge", "840": "swab, swob, mop", "841": "sweatshirt", "842": "swimming trunks, bathing trunks", "843": "swing", "844": "switch, electric switch, electrical switch", "845": "syringe", "846": "table lamp", "847": "tank, army tank, armored combat vehicle, 
armoured combat vehicle", "848": "tape player", "849": "teapot", "850": "teddy, teddy bear", "851": "television, television system", "852": "tennis ball", "853": "thatch, thatched roof", "854": "theater curtain, theatre curtain", "855": "thimble", "856": "thresher, thrasher, threshing machine", "857": "throne", "858": "tile roof", "859": "toaster", "860": "tobacco shop, tobacconist shop, tobacconist", "861": "toilet seat", "862": "torch", "863": "totem pole", "864": "tow truck, tow car, wrecker", "865": "toyshop", "866": "tractor", "867": "trailer truck, tractor trailer, trucking rig, rig, articulated lorry, semi", "868": "tray", "869": "trench coat", "870": "tricycle, trike, velocipede", "871": "trimaran", "872": "tripod", "873": "triumphal arch", "874": "trolleybus, trolley coach, trackless trolley", "875": "trombone", "876": "tub, vat", "877": "turnstile", "878": "typewriter keyboard", "879": "umbrella", "880": "unicycle, monocycle", "881": "upright, upright piano", "882": "vacuum, vacuum cleaner", "883": "vase", "884": "vault", "885": "velvet", "886": "vending machine", "887": "vestment", "888": "viaduct", "889": "violin, fiddle", "890": "volleyball", "891": "waffle iron", "892": "wall clock", "893": "wallet, billfold, notecase, pocketbook", "894": "wardrobe, closet, press", "895": "warplane, military plane", "896": "washbasin, handbasin, washbowl, lavabo, wash-hand basin", "897": "washer, automatic washer, washing machine", "898": "water bottle", "899": "water jug", "900": "water tower", "901": "whiskey jug", "902": "whistle", "903": "wig", "904": "window screen", "905": "window shade", "906": "Windsor tie", "907": "wine bottle", "908": "wing", "909": "wok", "910": "wooden spoon", "911": "wool, woolen, woollen", "912": "worm fence, snake fence, snake-rail fence, Virginia fence", "913": "wreck", "914": "yawl", "915": "yurt", "916": "web site, website, internet site, site", "917": "comic book", "918": "crossword puzzle, crossword", "919": "street sign", "920": 
"traffic light, traffic signal, stoplight", "921": "book jacket, dust cover, dust jacket, dust wrapper", "922": "menu", "923": "plate", "924": "guacamole", "925": "consomme", "926": "hot pot, hotpot", "927": "trifle", "928": "ice cream, icecream", "929": "ice lolly, lolly, lollipop, popsicle", "930": "French loaf", "931": "bagel, beigel", "932": "pretzel", "933": "cheeseburger", "934": "hotdog, hot dog, red hot", "935": "mashed potato", "936": "head cabbage", "937": "broccoli", "938": "cauliflower", "939": "zucchini, courgette", "940": "spaghetti squash", "941": "acorn squash", "942": "butternut squash", "943": "cucumber, cuke", "944": "artichoke, globe artichoke", "945": "bell pepper", "946": "cardoon", "947": "mushroom", "948": "Granny Smith", "949": "strawberry", "950": "orange", "951": "lemon", "952": "fig", "953": "pineapple, ananas", "954": "banana", "955": "jackfruit, jak, jack", "956": "custard apple", "957": "pomegranate", "958": "hay", "959": "carbonara", "960": "chocolate sauce, chocolate syrup", "961": "dough", "962": "meat loaf, meatloaf", "963": "pizza, pizza pie", "964": "potpie", "965": "burrito", "966": "red wine", "967": "espresso", "968": "cup", "969": "eggnog", "970": "alp", "971": "bubble", "972": "cliff, drop, drop-off", "973": "coral reef", "974": "geyser", "975": "lakeside, lakeshore", "976": "promontory, headland, head, foreland", "977": "sandbar, sand bar", "978": "seashore, coast, seacoast, sea-coast", "979": "valley, vale", "980": "volcano", "981": "ballplayer, baseball player", "982": "groom, bridegroom", "983": "scuba diver", "984": "rapeseed", "985": "daisy", "986": "yellow lady's slipper, yellow lady-slipper, Cypripedium calceolus, Cypripedium parviflorum", "987": "corn", "988": "acorn", "989": "hip, rose hip, rosehip", "990": "buckeye, horse chestnut, conker", "991": "coral fungus", "992": "agaric", "993": "gyromitra", "994": "stinkhorn, carrion fungus", "995": "earthstar", "996": "hen-of-the-woods, hen of the woods, Polyporus 
frondosus, Grifola frondosa", "997": "bolete", "998": "ear, spike, capitulum", "999": "toilet tissue, toilet paper, bathroom tissue"} diff --git a/PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/data/README.md b/PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/data/README.md new file mode 100644 index 000000000..dc29bf331 --- /dev/null +++ b/PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/data/README.md @@ -0,0 +1,43 @@ +# Usage + +## Step1: Download datasets + +Contains MNIST, CIFAR10&CIFAR100, TinyImageNet_200, MiniImageNet_1K, ImageNet_1K, Caltech101&Caltech256 and more etc. + +- [Google Driver](https://drive.google.com/drive/folders/1f-NSpZc07Qlzhgi6EbBEI1wTkN1MxPbQ?usp=sharing) +- [Baidu Driver](https://pan.baidu.com/s/1arNM38vhDT7p4jKeD4sqwA?pwd=llot) + +## Step2: Prepare the dataset in the following format + +```text +# Dataset struct +- ImageNet_1K + - ILSVRC2012_img_train + - ILSVRC2012_img_train.tar + - ILSVRC2012_img_val + - ILSVRC2012_img_val.tar + - valprep.sh +``` + +## Step3: Preprocess the dataset + +```bash +cd /scripts +bash preprocess_imagenet.sh +``` + +## Step4: Check that the final dataset directory schema is completely correct + +```text +# Train dataset +- ImageNet_1K + - ILSVRC2012_img_train + - n01440764 + - n01440764_18.JPEG + - ... + - ILSVRC2012_img_val + - n01440764 + - ILSVRC2012_val_00000293.JPEG + - ... +``` + diff --git a/PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/dataset.py b/PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/dataset.py new file mode 100644 index 000000000..19cb0bf7c --- /dev/null +++ b/PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/dataset.py @@ -0,0 +1,230 @@ +# Copyright 2022 Dakewe Biotech Corporation. All Rights Reserved. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import queue
import sys
import threading
from glob import glob

import cv2
import torch
from PIL import Image
from torch.utils.data import Dataset, DataLoader
from torchvision import transforms
from torchvision.datasets.folder import find_classes
from torchvision.transforms import TrivialAugmentWide

import imgproc

__all__ = [
    "ImageDataset",
    "PrefetchGenerator", "PrefetchDataLoader", "CPUPrefetcher", "CUDAPrefetcher",
]

# Image formats supported by the image processing library
IMG_EXTENSIONS = ("jpg", "jpeg", "png", "ppm", "bmp", "pgm", "tif", "tiff", "webp")

# The delimiter is not the same between different platforms
if sys.platform == "win32":
    delimiter = "\\"
else:
    delimiter = "/"


class ImageDataset(Dataset):
    """Training/validation dataset over an ``<image_dir>/<class_dir>/<image>`` tree.

    Args:
        image_dir (str): Train/Valid dataset address.
        image_size (int): Side length of the square output crop.
        mean (list): Per-channel mean used by ``transforms.Normalize``.
        std (list): Per-channel std used by ``transforms.Normalize``.
        mode (str): One of ``Train``, ``Valid`` or ``Test``. ``Train`` applies
            random augmentation; ``Valid``/``Test`` use a deterministic
            resize + center crop.

    Raises:
        ValueError: If ``mode`` is not one of the three supported values.
    """

    def __init__(self, image_dir: str, image_size: int, mean: list, std: list, mode: str) -> None:
        super(ImageDataset, self).__init__()
        # Collect every file under every class sub-directory.
        self.image_file_paths = glob(f"{image_dir}/*/*")
        # Map folder name -> integer class index (torchvision helper).
        _, self.class_to_idx = find_classes(image_dir)
        self.image_size = image_size
        self.mode = mode
        self.delimiter = delimiter

        if self.mode == "Train":
            # Random augmentation pipeline for training.
            self.pre_transform = transforms.Compose([
                transforms.RandomResizedCrop(self.image_size),
                TrivialAugmentWide(),
                transforms.RandomRotation([0, 270]),
                transforms.RandomHorizontalFlip(0.5),
                transforms.RandomVerticalFlip(0.5),
            ])
        elif self.mode == "Valid" or self.mode == "Test":
            # Deterministic preprocessing for evaluation.
            self.pre_transform = transforms.Compose([
                transforms.Resize(256),
                transforms.CenterCrop([self.image_size, self.image_size]),
            ])
        else:
            # BUGFIX: the original did `raise "..."` (a plain string), which is
            # itself a TypeError in Python 3; raise a proper exception instead.
            raise ValueError("Unsupported data read type. Please use `Train` or `Valid` or `Test`")

        self.post_transform = transforms.Compose([
            transforms.ConvertImageDtype(torch.float),
            transforms.Normalize(mean, std)
        ])

    def __getitem__(self, batch_index: int) -> dict:
        """Return ``{"image": Tensor, "target": int}`` for one sample.

        Raises:
            ValueError: If the file extension is not in ``IMG_EXTENSIONS``.
        """
        image_dir, image_name = self.image_file_paths[batch_index].split(self.delimiter)[-2:]
        # Only read files whose extension is a supported image format.
        if image_name.split(".")[-1].lower() in IMG_EXTENSIONS:
            image = cv2.imread(self.image_file_paths[batch_index])
            target = self.class_to_idx[image_dir]
        else:
            raise ValueError(f"Unsupported image extensions, Only support `{IMG_EXTENSIONS}`, "
                             "please check the image file extensions.")

        # OpenCV loads BGR; torchvision transforms expect RGB PIL images.
        image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
        image = Image.fromarray(image)

        # Data preprocess (augmentation or deterministic crop).
        image = self.pre_transform(image)

        # Convert to a float tensor in [0, 1] (no range norm, no half precision).
        tensor = imgproc.image_to_tensor(image, False, False)

        # Data postprocess (dtype conversion + normalization).
        tensor = self.post_transform(tensor)

        return {"image": tensor, "target": target}

    def __len__(self) -> int:
        return len(self.image_file_paths)


class PrefetchGenerator(threading.Thread):
    """A fast data prefetch generator.

    Wraps an iterator in a daemon thread that eagerly fills a bounded queue,
    overlapping data loading with consumption.

    Args:
        generator: Data generator.
        num_data_prefetch_queue (int): How many early data load queues.
    """

    def __init__(self, generator, num_data_prefetch_queue: int) -> None:
        threading.Thread.__init__(self)
        self.queue = queue.Queue(num_data_prefetch_queue)
        self.generator = generator
        self.daemon = True
        self.start()

    def run(self) -> None:
        for item in self.generator:
            self.queue.put(item)
        # Sentinel: signals exhaustion to __next__.
        self.queue.put(None)

    def __next__(self):
        next_item = self.queue.get()
        if next_item is None:
            raise StopIteration
        return next_item

    def __iter__(self):
        return self


class PrefetchDataLoader(DataLoader):
    """A fast data prefetch dataloader.

    Args:
        num_data_prefetch_queue (int): How many early data load queues.
        kwargs (dict): Other extended parameters.
    """

    def __init__(self, num_data_prefetch_queue: int, **kwargs) -> None:
        self.num_data_prefetch_queue = num_data_prefetch_queue
        super(PrefetchDataLoader, self).__init__(**kwargs)

    def __iter__(self):
        # Wrap the stock DataLoader iterator in a background prefetch thread.
        return PrefetchGenerator(super().__iter__(), self.num_data_prefetch_queue)


class CPUPrefetcher:
    """Use the CPU side to accelerate data reading.

    Args:
        dataloader (DataLoader): Data loader. Combines a dataset and a sampler,
            and provides an iterable over the given dataset.
    """

    def __init__(self, dataloader) -> None:
        self.original_dataloader = dataloader
        self.data = iter(dataloader)

    def next(self):
        """Return the next batch, or ``None`` when the loader is exhausted."""
        try:
            return next(self.data)
        except StopIteration:
            return None

    def reset(self):
        """Restart iteration from the beginning of the dataloader."""
        self.data = iter(self.original_dataloader)

    def __len__(self) -> int:
        return len(self.original_dataloader)


class CUDAPrefetcher:
    """Use the CUDA side to accelerate data reading.

    Copies the next batch to ``device`` on a side CUDA stream while the
    current batch is being consumed.

    Args:
        dataloader (DataLoader): Data loader. Combines a dataset and a sampler,
            and provides an iterable over the given dataset.
        device (torch.device): Specify running device.
    """

    def __init__(self, dataloader, device: torch.device):
        self.batch_data = None
        self.original_dataloader = dataloader
        self.device = device

        self.data = iter(dataloader)
        self.stream = torch.cuda.Stream()
        self.preload()

    def preload(self):
        """Fetch the next batch and start its async copy to the device."""
        try:
            self.batch_data = next(self.data)
        except StopIteration:
            self.batch_data = None
            return None

        with torch.cuda.stream(self.stream):
            for k, v in self.batch_data.items():
                if torch.is_tensor(v):
                    self.batch_data[k] = self.batch_data[k].to(self.device, non_blocking=True)

    def next(self):
        """Return the batch whose copy has completed and kick off the next one."""
        # Make the default stream wait for the side-stream copy to finish.
        torch.cuda.current_stream().wait_stream(self.stream)
        batch_data = self.batch_data
        self.preload()
        return batch_data

    def reset(self):
        """Restart iteration and re-prime the prefetch pipeline."""
        self.data = iter(self.original_dataloader)
        self.preload()

    def __len__(self) -> int:
        return len(self.original_dataloader)
z;3ew6TK&dkCa3%x!vm)v01V;fUp)N3FdYBE{F3(!wbKz`YlL`-k$rhNK8De)s1d1OO7?xeuN(0p8*M<)@J4XjAIQ%OqgDhSC8-C{EX#X$xml>K?)_>9OO+$s)*#JQh zT{~q6976b;9+Y1+IGPZi(S8AnGXEtT%?j-3>hw1hsw3FO%EkfY;A#(VgDtCdFJ0{y z4i4j(xw<&YgB(C$GZ&Br90AbwbOQa08S^*uJ?5UGXm3Ri-~I3IKawlhPFq%A8?NfV zod0)>3d9QZHw>zoor|`a)!%TqK#-lCHps(85u%{2qHONyXm>9V{V(+2m@%v!!Cp`8 zY^?tB6yER7EBu9qw?wl5S(>@pxxlL!ZXmGB|AJljFZAEpvCOTc9qk;!|K(hQ-}0Zy z|C<1B0#J2yxc4=Ni=&g8s|y75*TA@^VcNk5(*K0THg|M^52*hMj|D%(*8d~>uQQO~ zUKsou`THCr{sr9+++WulWbAvGwEZ52=Ky$q39rKU(eNs1E`S_>aQ9zVTEu@}X#vj1nBVY1Rx54P>EwQE1o@MB++a)=j8(jj!C02#cCaW zKL$ehNwX^7Jd%miwvg?}b`c;O*I5CbRK4wlx~`teF@MN;R~_Fqv38bQ(>=LATtF-THsSx=0_Ojia5o3Q zM*5|b03Zp-Ob-2EOJ_qbQiq!Oq+!@{bs&wrXKtf=rJ8r^LXh|*#z zVaPD@PjzjR_HQV_;S=}YsCBkzMBCWDu^MTu@il7728r~~!xEHQf}?xiNW`_BJ)ee! zChwS=2B~d7_G~DUE5x_wddaEbxMt! z?l?J8`om*tVVz1oDgVMC^C% zoWGO}_z`NL=7Q+mF}G7~u?Upo7clyU{yu5*_(saNU&vI;uW}RdIK+9Ex{VQLOVH~W}IMd$+>7ET087hrU3%!%LIQGk3`z~BraHY z#b%r9>ri6fJ5pHh;1^vx{9Th<+KTG4wls4+mI76>~lkU2Vi39lQDojEDoOF;b;?l z__%st$#lC`hL1GJ`)0)Z=26vD@y{BEFluzS4ycw&RPHy_-i=QOi&eKwJChXc2?@Zt zdBdsCvmpNWGt{eyQ98qrrNbdm`cR8O-jsosvDticbpMQWc;tfvc3?~Ep{ydmmiG!XP3(y0Mh`NO5|YN@@GdU}^8|1aK1ef~W2dkmG+XU+@`EW1y(W zA)6W61da2wLwDb1x2IZ9H9kwN(eLPU0M)##+*J`UV#ysUji5bkxDbc?=N3HpemtUA z9YG{QPo^~{H7>$b!P1QWphTojNUb_z$WF+!T(HNFOK(@TJs+cXR(55Y50MqwJJr55 z{fEfCBQSR0*Mx1((Y2T7SUg_7j^`|yfsG%5)-S0T62C6Z-2o)MHoS<>s4AW6f@Urg z+qhIlB<=vto^;Zd%WMAAL@TnfUO1_8%mL`TphKtPCX$GzLX$Bw7=HkOXnEY~$Q$tttj@cdb#u%5boc;&1>edoY%o9qMo+Bzo!@lQF5xG=hpAF4hDj)gT@ zv7T&`(i{L{ef))-uil3iz6)~OErc{67~tB>yAUc!WD5n$WiMX!$2Z`do3J5ATci)A z51F~2`+of>U#N!nMvWIUA5tqkU4N^t|C2@{A2yWHN!hWR-bZLw4ZXVXpU|xx6iU~| zBL4~#scHLEw!A4>y;)#>^Jyu&!0p?`fGq=4`jl!Yn}X-{b4a7F)@$DYeIWGFE5=TR zGq}TU50?!+eAT8PJ39|*X4)BWrz9lXl~VWX)JATd^}EaJ_5LS74{;; zz@T2Iq2oO#1w#g#O^E}Jt=ts>**mzVXZP6pmb(qrSZLp*rFc_Csq707so>ZqH{>jGdArvnKyB%^D_YCPma8Uj zovqe4)*3eIL=3-TJEZDkIG#G3?N-6hX0EgJp$xBNJf}zP`nE4yk)-u;55`~uO{XNh z;b}EcSvln040qGlD&M}Z!uT+6@_Ln&q!3!KP2CHDC3gUnkRF3O0D3wd7Fj;DGDE@D 
z!ot)7>u{pp0cc#C;tx*WlpJS2NSydMfJ1_`UDH0;02AJi_6z8bS(jdbdbk6R15b4w8U5Z4qnt8!MH2+;Bvjl~=&89_jKw z+cIx4%nVWnm1d23w@3qg{XOvrB$Jk3+4;&$?VT2U@p?_`KX7U6mm0e@yP0;E& z6_pMNRfGUKQ&%)g@%Y^G5X`Mc-;yKQ?)@Q%-I?>;zr7}dY}xJ5;i9ruVq~mjfvcpx zT4bjfI2&ebIOaKdfa44?bp>u!PKloS1-i(r7NDOI6<+(>7ADK9 z+QFB;@QXNXg1~RXB`~}S6XAO0tRl98h6s*Xd-YB3d_6C}>=sEq$I$YV z_L@jsY|c<@=Y_fOTOiFD@n_TR2^Pz6YP`i;h1=)PF4#h8kR(bc=2e2STZ;}75r6g`5eX8Q|EJ$d{&scUp2g zavFvZf#dK!(}o8-0{TVdhKwZ(h40iLxg~05w~dQ;8Yrk#V`IihMn*(w54`Cob){4q za%-6_c}Y(=G9KvvN%W)9P|fYZWZHC|?GMZn1h1J?1Jc>)<=#k6^g{GnsM z`pqNw4dY>l=#!sqcjG<6IxpOJ<6|OwYI+~a4J@e68H_aC0lr=qT24bF9NhTjUWmR~ zpEr^SbFa5wJXqdWGkxaf%HgIpq|Ki>vHE`Cs~s#H#II|hqgv;_-L`8hze~%ZAAgfb z^E#OH{i`NTxo^|@xBlX3<$33t^fTv_d#~%AKKC(YXkNARTugnNw!glqHGl~?+&V7R z*wv=8f*OQ|s^lJ6Cd{ELgZkrBvV`8F)QLDN#v zfZi`MEF6Zoue+yy2hg~1=nz2nV4e`Y14!#T+4`dS;_c|(0XAFspWOyq84i^$RZ-3x zqP#?OLzI417=8h#R|)Fx+cWay+E5tOB@@p1EN}j zq*{^ z_7|G=j2AT*=6H9C7JRJQXsN%*23)mV<9SQU`SqOIQ;-hT2vpyYu;qpc%BI zvSYU7i{nN*Q}H`n+_&v(YOZ0Coid|PRAC0Jw%9N0z$xp3+EUKyARe<}TuBA^<5}9F zRo)~*#3zcNm#7(yKNp{V7>Bh_ylE6=_j2fcIR4O#Bcdg;by{_C?-{%I4u>=S8&2j# zx=@YE<^(&tYN@fIC&rSgma}T%KKkH6<@d9noozXTszoY&+jBy0r#IT%H<@bUuLio` zyxII&cEP*VqMNl_TrV9q_mr|J1pbIQt2>Z(#RhT>6AZDBm7>FWQTNkiIWJw_Qt%F1 zY{IPWfc2!Ja`{58$-5n!T>xikXWrJ`YfArclhw;1TV<0Eh*~{uJ6|vUt)+{*btuy9 zrgnb+TymTR{@Bv;M7BM=Smm>dw8O@g{>@-Hq$S?VKku2ibUu}FoKDjxho1Tu8grWo z-c-uNYK2-EgfD#PDv&A*;GZwib#rHac;fUGOtAwE2Pl1W%g*(8ktp}e z`5SAxO{XO)EYIehIddsQ?QqF4 zZ)07IlAt}!Qzte$$Ig(@*`j0(+;iXFuVNa$1B{0r7ZXo7Z}&Qg2YqIrKLDms?E+)! 
zCeD*>P3Fta?QX$s8!bDna%?Dm3GOX>mrtg#V~>X-UbjU|diig&KNqUB6vRiC4%DxnJfRU_?jhvi z;bQVQ;3?(ayAKWVWUyEO^J~Tr=MxJb#fs^x#yFg2=#yby@Y7k?a&_->&FZO%48`q8 z4bjRN*rG&x8(=XL&Ur}**J2u!xTmJFQ97h$I4s3IryXld48m-P{oIt{vnf&{ZgmdpUcC7 z&|1jpEDH(xUdTPo=fkU%olJ0W?!K8uIzDuywT`$Sz9{)EtXT0N; z2C;VJouh?8O(#ajJH|#)s?8}2G+GBfFi(k*aXYIYYMb0%!RiqE>^)aXRjnJta$ukL z+}L(&zV1zv`~2Zzo7E4IJAhpE5i9}nBGE(<;m~rVS_)$4U42eD*VgInCNV8~rgI1A zphrL)uB{6zbkOroVuO#Q9c$;S2POO&dU)(-sn_#tFn{Yi0CokK-POMA$IOD9zra{) z3*Oud?>aK?L)$xm!mgxT%@dItvmf=B&C{;to!jZ&M$k6@lFO+n_^ZXwt2+Q>;)bCx zQO@x9hbAOjlSN#bOhoXuXzU;)47d7 zV`|_m+$)T;GDwN_?B#o19c3zUNTs_p)c@06cRKQr3Q7q zklenZ8+W_|D8l$~Q?^L#!0QQG_AlU1ERKa*1~3BAnI8VuI{>iGLE25nle|W}6lEWh zG;opbqLem|b^EUR%4B5CUm#_}-q+2trUZgf*=VygRTE*GGHTjT0;Ge#V(zfUub z{7Sx`qs@j+arAW!dZO0x)tTW=mO}PGX*H3z5Gedv@hXg}$VH-f3_=w=u?kAxMTLcH6%p*59n7qDk#`FuR-wZrWe+!yi=6{1sah?*Q3~ z>G~pqdfNsJUUGiDT=K32% z-|foymdGo^g>PP7Ei=x2JB56Dct2wG(rBvQ?{XX*WI|F?A6y?J(JNp?4gGmt(*&(! z7?j%S{CFAVY}?L=H&oIzbGV}$xkLBi%jRU$gWx#@97c?Deg2unQdiUSp{=v0%k6iw zzh}1}{F=+vxu1i+pV!3$JOx+)9N~c@5P%+_4zPyTUEmq=s|F8<0f_!@IOqX_08V(I z2`=!rhToAXv|phgxQ@H;WKdZT8%qE{MFqeLXF~^|ArJzPel>ubfItaA`Beu1%n_*m zU4Mn}_+LEm5EBCTuRzoN%sd!@=il|9V4L4s;P{X5d><@Qfam*;{o4PUai{-Jyi*V! 
z@8o&%gkOaFiO3T{dTu@u9zGGi`ye3zDFflRzPmdBAOrE=a{Drn{$2koMuv)j^e@}s zef^L92&Dh9AA$D2?MFoTO9sLt#DB|%#}0m{zuEjJmH|1Vn**_Z|49J@epVJ)j$MZzejU1)Bj>|u@z_5S5c>Tadfn^wQ-^6 z;S}QJ;o#+Er?&)~*@N62!M60=oczpikzfl;5ltDne+h&`;>`c*n7g|>r#mmFBiM?I zTUc0_>j@7R4-W^Ng9GB};9};%;Q)CE*Z0d18~uMb`pyNwskq7<}^Pkc6$6f!(1OJHm&*=K&u7Bi#f5iM}bp3JHKk~poV*WF_{-1Uz<@Q;}P-;OT${LWvqLLdkDRFFG-&Ii5_f;0dL@jl&`NbrP=ihQ5YQBhG) zG0-tFG0-tEFtPElF|ly5FfeckaB%VP@d@xTu?dL?@rmGd{Cgn?_bri-(cum8u`saU z*ngOQFNTl+=tDw)uO9GwF@!DvHT=_10MdUJK>z^YD~3dao7pA_^)38k{HrHwgg=0Tl@a1@&GHJi)(_BqVx> zfI`f`!%Of?`T^rpO)XRsZVhuYXE5Y-ARjPxOeQ8X>*)C8?~4(nwz!aZrD<=P2nVSO z4G_Kv^bFHqO+vgzlQi%wYjqd}Rw{IVG|p^s(lqj(nPhGWrhiX0Zo-X3Kvy5OoXmV| zld3tSnt^2|S9^d^Ft!l?n6W=;78?>YgF%#v03H(5UZ$x@7eivRc=sac`WZM|La5b96>L|K02ws56!LN)S_r_I&qNS`QlHaknjz` ze4=umpO<9pGKnYKrRQ3SqDp&KVB#g(iPG;9a~zj*DYuz7+)S;Q zzhd?NBvYCR9rFfsRWMA8xM1dsPKRM28%~6%s$;!NkaSn0PelTW!=~t5Tr-LodvyIY z3}RT8mVh~ib2`E-=hV~nVZu1AV{5|h{KkssQ{o-vibu9&43ga1!5V24KHcp z?LaSo8lY0a_Xa#$ITwnEOC6Qo?n|uh=*LkkUf&wr%h-PSkLT3&cvV0(ms#-cjo;))?Ob1~Pap_7ik^7x+P@+7z0f-$UrXkfI5gpD?6mufIQe zLERL!*6ZVEF$Gd_hXu$Nu-=9bH?q?GP)?5d8EetpsmdcIjOh@;ki|l}(nh~si(G-_ zY@dvy`w{O|XYhj!Iub!KdH%{lEk1c)R?$~gwR~8F;acUWc16b0J z@mMMRr}0J6m1y6$+%JpI<5H(?40?sdKULs}oW%Quo$R{1={bvXf1P;gXh`jMOs6`E zHk%%@oFVo+?_;MVf!wHWGG_ZA_FWtwdwIm12RQ7ctcE+9gOSl?#6`wJp1#f*h-}vP3gfVmYM`S?*PZ$9 ziIm{`2xs(^&xUsY;=0wSrrf5#Hd(IJK--&jHNLlvny zgo9v~3dPKb)c}RJ?Qu^Z9Go*eN()m)S;`!n5KW;`Xm|=(jlLeX?oKdz^UxXG4fR+U z7@$*)2&sTs6!{?ZYbO}oj7uCIO>(fpT??~O+aS`g(V`|e*u+Q2J2zH}Ms?;2O=R3Y z)63u=j^s(<_~{66fXp_F;3yhy>;*25J?*0Vjg6!TlQBag>KcWH4{4_a05lHA9>7so3 z26=aQuV9n`b}I47&zODG%lTYljDHJ?Y4Bu+EFj*Jge?|V;;85O8)oWvIqlTW-201= z?HKE-t43gS0_|kK{TNRi1I?IxGU=zGqny(yK);nkfPzh<&?i5pBA&HMrT&aXh|>W( zI;ZA7Bv+U;=L^(IT=WA|NJbIwLhEUM^D}B!x6sw}kha8ij)h13xU|i?og@L4k=+WZ z%qr2{fiJOjs?f;5^G|RC8zqU(5qpMR@@JPrTqdvwe{`8-aUE4ZKp+SVwRt9 zby(6Dzj_%0Q>}7yl;B<55oyi`3aYTqi_NqTUwTsD<+zr+$xx-~+g!bs44qb{P7aCFw#`D;Z+k0^K3CiIw~wH=D6>=N$rD=&$my&y*q$AuY+1i 
z=SK$+OPb=^o}E&*T7Gn6Ix&f{i+1HP#&8hzF(YKV?fs{{MVr*Dfvg2*lP=bYS_^{j zr;lAvuU$`Vpu|bF>2iTa(H6qDD>;*_T4@(X7;)U3$M46QqB_g^8fUYO0w+a)B(E<_ zR}%>t3C7rM!apyU$sA5TZB4yi^v|n#ndd2L4N}v>_U&bXN_?q1e~_?Pyv#SlDI`o- zJ7zB4k(AX`S+v)N08M(7J;BRV7?iE7Rp|TvTN|-e6IeUVMWK$<85AiS40T9B(w_AB zcKmcZqm6B^PVv3$P$6|-Z&Cwdg3O!7#I&s8+l)u1d03FQpJz6){jrDHi2`!BeV}DD zK6UeX$%Zikdc>B7?A0nC_0qzaD!IH##$N`nLTM7^bzv{>07*Os*LJI9c)4zRZHgI! z^WTe7sj^ruX|`;xFUO`TRu8E(UGND_-yfch z>r8%?OD_m3Sj)?~3?TK?!{{p%JJbuN!+*F|CbSxnua-yuuuLzs?5k6hSCIyhfsb)JMGooWbIenP|k;% zfR_smOD3|LuObDqwlU;v#6q3B!F0N!+vF5i>Cq`=Qq%)}g{myrnzMq~ik0j+?xB=+ zvjWIDWe9hGmVB-iBCQ#@UG6A{=?n$R7v;RNt{q?7J!o7+%vqZ34+7s+?#a$I5?8h} zJ4F$}-ta$no!FOb?|4m~lHV0R+edTKr<{raztP#gfBD?>@vgX4;Isa=5@?mZNH;j} zToYuLjwTD{EA*pT=TT>oX$3WnLCTqBdS5+#SyN#8v7?soHP210Gqy15BO^2!!vnOPA$o{`~KqENee+}`Y& zQm9E2^Vo~aanQ>{`Vj^hDb=30BtD^v-ahBWwXtD#A5|wdcoNErhT+Qzpok&`*Z$=D zs`ID^`-E1Z7hARhRyJ?Zwg`0=$|(^UF4DB`NlWIDf>@BsnIotktIVI`K)P=q|exllG$4_eaRi&4aNy*!7jaFMCOuO1&~>({pl(R#T#uu3XPjeO|IkKJbx(W<%fWpu}oa8en8)rFTshOxt<$eT{?#M?84OO24oz-vpN z>hcHA@{ZxZm^+6Vgd_iyJP1?=r`Y-jveM=Ms0lM}VlAR$x@9!x4jrZ$vmLDEeS9|K z++9b#4?=ESU7}e}P+V*5d$H=Pnz56gBSAZE-%FK__}No-!{QiRVV{Mqt>MP5Ex$qA zrlQ;+`WCc?IKgs2^EAojrI)Bm)K121`YwG-e*x0smTBruqj=jn!)H>LZK7*&b@80D z5b(Uivs%kt8R;>ib%EoPFG|-guI#X&^Cl;);h+I!am2K%$kRwjOpo*d<)d#8Vo4c| zUkrRN!8BO92BQ}f)={i5_P;I4Z?$BL?d?wW^l(G|#NdSFsD9njMq^vqX{m^+qxrOQ zxs@6-Kj3IF7NZvU8X&S8B+U5;<4St#6Ze!|g@sSd8ynvz=L+)YML7oS-IuE35@}MF z;8SiRVcWU4Wehi>O524@~NZtE+6CWCgGl_Yiq1t7oOQpdxw_G)HW+1u&jv14?bFdPKygQ zBGRfo)FJhXWrzBxd}BWA4@VzARvr_z_4_J+-Z!B-YJEf3TZKo?z%XcO*xTp6sG;Ne zwuLOowg?m?{yea@ImJ1s*yv?}O8h%YD)!oMF$f8dWQ2a&rKiOdE;Wdnkp|N_=_ETp zbz>3L6H6!Ssv1HfG>Vb8Sakg``7lQ2GKGIPkg9a>s3e_1sCh0aNc=~jXMJJo{Mr1- z_YA%URA=A;Mwvo9_g59ZBT@$0gjdoDz)QE=wxtPOG;8s}%)|iKIOk)+OLO2+uU|S& z?T_8@@VrmYGv<|VdlPXwx^faw5|obAc;@Z(@w1&3!f-~W=sh4#O8D$xF5Sh@woYB} zMO;&s&hvu#iJ6MXw651(3tV~yKDDq%{)=Pu2LxF?L4uRD+Kn`PTcg+FB2+#fsrn+0 z_lMR^mreF-#uo@dmtSEkx@Hbe?Ch2qxFp7Iscl2PzkV=2^a>ORHH)kJibcD5ZTYjwmK7P%yxQ)D_mQZYOM1 
zak9?!CD^YK`OHS6kWL+oR$|tY&7>65^_36H?S#4e&Winj2Nv1IC{s%a`7 z7$7ljg;nHWq()NI7qzS(O`5mWW8@O-Q<364iqb9AV^yxhTfFcHKmLM2xeGOS7C2FP z*v#w=C4V#_$fi78xdoxAW65N#(@Z2!k6=jzSR>Fi{8%U}eNSiAvfMPVaWEOLh0CLl z@x7dYs>La*E7!WRplNpfYg6C%!gMz7ld|P3aKoSUbfo-@X zLw6?YN7TRuL`VzvPY}t!jh}xNP(Qck*)a&mdIi|ttDiL6PUP=6eUcXPQ|{?9fp}+y zz(V8C2h2l-Y)mQs$<}WsUc`>YqkJVvjV)qVc=Rwe?qQm~2uf3cb?a-s^;2SRCEP_i zRGF+WK4jYuAb&BtLBh{z{^=!BeI;=r+&zv9fo3{p`d`IwTQ#eL2c(;`7>BJowYkj$@m)aeVt@1ycE?3L05?F1? z!-%_m+@{YRMrBSS)t)PJW_1Txo0rx-`}&o<+@yAAvd{Ubf!4YA(|TBSGA;4vfeQ77 z#TQq_JH|8yESnRgP0_M>^MdhnJoK8gYrRpLqzj(C0lmauja?0QOano6Vt-J5E`j7zLwDGa^9Oc$`+zy$7kd9hqdW!h&w*6B1%HMf75M2Bth`u9>4S136?% zztBhTM)6X%5a{zzhhj(1Zv^>CSC9?l?SP(#4!nHiBb8Yv{fU%XpM-_T$OqU1VC~l}Nh6Mb&(Xbt9bNd`i&s@9C(*e}{6x!TOvAnJM#S@_!a1wt zxHvK%{l5N1t|bs$JQ+p8va;{lscG%@RV35C@xo=FqZ;FtN?#&vzW#Y<(@vpD0U4GJ z7@VBQ!?N*=vHkp%X(yog{A3EdLJYEft(xtonncGOEr0b)pWlJPNc#?8gDmn|hkVOE zCB%|B%Jls~aH*SKK9ry=F~p@HYDR6gAvs``S5p3vYA#|7x*x&fVrEa~omxJh4)@YV2zi>5U#n|KpxpsZ8XdO=a-oUEU)!j_an)SnKzs+ zY-K%MtSeS12A)u=H9-Z0sdMz3>EcxjwCPRIo);)bZ`CuNcFD#vt7@Kgxt%`km2X+| zi$Zvn!k3SwTDO#yxHW-CXM{jTi-$)Yo`vXn>a`zy=$OkCmrgz;aM;2__;Z?{G@MKA zm>1KG0Gu89(vw+s-i2<<(BX0T-1hAuG+MsUSbaxYM5leYfS8bw5bS)C%EXP2Z%>Re zbomvFVJgaQlc~&8)UVSVx{^yWi^~sVL{YH_t&O%IsuXVX1xU%r5{>wavxyndF@BN* zKHpdyet^T2bZQvEocaJh8=;b99>f}YB{8|3D{;{GlZzo~vOWe?yFErbcqJ+6*>SVh zC+={7xq{qGQn4kdHmZG9g@>0mmJ{>c#3IbHMJ=MU zYks|4)%CXq%B3EBSBb$jMT_5YRZ#8#giNX*)xbFdnB;+690KQDQYYuu1|GhWU2F#Q z&!XG}h+*P|FCw1CAYwCPkv}o<{R#e{wD_0 zXe^4jb~%r`GX|xMwTK{jyz?`rRxc8K(a74$Ds2azmovn-sW2%D_~bK5B)@r}@R0E* z5qaKO-s)f^VZc&w6bHta2kdA^YW&)gHzLf@MFrk$^bVmnfASpXEqnQ*b&r&ahB`LRqul7i8(D-2oDpPREP7lYJF2ZWi6OOLr!O ziGe|vAx_!NM*)2T$jyyKs}=ZmVQsgII%rw?7Ozhzw7FQ{adg~R^$-WkgpmbbC_&e+ zaU>7xbakD@6zPjqKH=_^YHbPPF>O_{DhsSS076=k50;5{N6*~stWD6*&oR*mGUk26 zR^PQb8CEN}h(PQcl;UaA`aa`T7oG;>>!{SJoO^D15|vDU*Zf4N08Nr#34TqLB0j0) zr4%)`2qj<-1u58{*o_n<*ql1FxR`h! 
z_vwWO4ZcRNW4dP2e1$V-7q6AT^_oX4_$~40lbPV@NxnhtS$^yv#r6IV@K)o^TP!Cs zJBkO<0=1r<$_>=VCDRVu$NCfpUHRmeq8Fw~$1cYmxqZc0`5|In9OK-fs{P-0gI>`rcBj;{msOH0W=4&}2;nphy{1(x8jU89L zD(c8o(z3XtG!NAj`(izn*bPwJ%U)$yGAX=2lBbZ33lQ&w4!D{|t!AwR@@?_aXDSTo zG3ynLSH*B|eqxtV84L+&jypEiVn4Czv7Sc;B8#R-;&47LRf%=Qh)sHAf~tZIq##q7 z6lWA+Kl4H%^?BzNWm0N1x+#lC_=qE{L#6tyXC7;6e!B=)itWQ%vC?xK!CVr-Bs-0@ zLEo4Sci;hhK6KHm?CbJnqGMUytB=4K13#Wjx)9P4%zc5yGP&$&w)28+`f;(RZej?k z8Sq6*dqvKVXB;rVx1y~0OlV2e2k4AgD zJhySq*ayY>bhr7^fhtT*c7IFJpAZ(YLV>{De)=O zMvIM1M(8BZt=ExLDG*WW8GQQ5X8y(nLuYGQozRroj!Gp`Mfq6clrVB@&m5HOrS~0r zI+|+{gC*;`KI+k+ma6g|f4jK>nSqHqd@#7W39=OmMbRojaC>mN&?S3B^fSnJG6-%zwpI=(0v^VF?Sg z2ZoZ4U^yUWjDOjUr&N*hvB;Bp`r1X!9i?R&+*|ZY;PC4v!!!`j`57$xm@t^f&Tu*8 zl|{_s+skI}x_L+Oi}8t3F)uJB(Ub*G#-hezW^b@T?CE$au(SCXRptjKCSv&^tLIi2 zso)L6EEwZV(*Z>)uj7jvgS|8~`daV~kQO-*t(~RyD)PB=6_?w?BN}0R3g!Awwh`x` zhVNf6vsG?hai0y=(jF~p_$uZ(@YKB%#1Cfg?pm^2^Dtu;HL(8Fw{UQFz%qZcF9N=J zOC715$fOoV{)FLy4H~$sa#m1{i^J1Rs?qEfdr*fQGZmS^0U&xO%=CT!K-*^K=aQgf z^{rJ@?XNW^&cm^cE$>x7qX#xmXsayjgLTzuK7*;zP-44y0TJjU1NT%4ehJ!1@9AbEzb{}Oqoy7&wXxF`##jIG0;Bt zNZCn=4%{Cd%GECLW4k!6f!?){fBl7^ET^y}U7VWl;9Hf>?lI!i`Gc~Mno%zhWR1ke z#ceUB?+~)n#!U-r8HT^pu*Vi~i?AaVdTx24&50>hz?l zltI$cc#Pg3SK(tl64j>BBNr361L&Jn4E(71F!=FDN6ERy&x?1Gm1D)Lm|~b8bSdJ# z5<926vVUl#q;#BnxW4#v3wvfX-DA>pO6o_q+sSj*!U!DV}> zvF>`22b3`&->T#Q)2&ouBVOGyQO>JKlaZ98Cx1h=Reyi1Vn^w^ra>fE-l>IGT+(h2 z7yBmaNAi=Ps-DLADdR#qW}IXxEDay{dc1c1oJ*E3ZZ~2Qf-uC=kR&}6uR1=Oe?P=2 z0wEP7n-?^I5L2)OAhVk;J@({4JXg4G;Z)7%mnXX;*UW;>E z7fWQ!Dt>y?DM4}RpNrR0vTe@40*;q$%5G#p^+>3id(izs7&|qd_feGe{#&}oSsUUv zjeN5D)=}!@8b-Wg&NTe+^`M99n3(DuUS&$XM3v<3C0baMZe`6%b)7F)5hQL7<}$W` zG!(O@to^9i&`R4xzp5K)_D&pdF=@jwP9+a}Lep2n>8ADb$*q>r=-gK?_xyp@h(4X9 z>7$%!5fqK+U^ZHylq#&&^vQe^2(~-#`~mxEeqhFDmX;`1D&y^n3@0+!v{+uEz(6&v0GmfA|@Lx8+eJAuGZmKGi=a7M5H& z--xYqtF2ePq0b(n8TgtzU;lwyweh*{tPb7Bqtj+K64o5SqR6ogEyEb{7ohytpLv)D zk`CG3dL=<{S$sUVKa#TQ_?!=*y7^zBOEc=pvjXh^g0_4F8YV7Zr(vexQry4|-O)?iI4L+jP zit7qjHvN()&lU4{6`CoklZ##?Y)19y@DJ^9)Qs3Ne9;!RCqG%l-*D07{N%zP%6OUp 
zl2UZ)8LDT~uDTF6OUn9Aiqppsq#{5c)c1JLrtJv{?{1ZEQDNTgQ5qj~hmmYKe1D|s z5OZ%{pu@7|X_jU|<<782@GD}N2j6jJ3->~d0@~F;*@%0`ac8V_e==Bj)g8K@lQ&o} zwUf}5w|#gr1p;T~L0_Z+Y#UPw2@4#zXwKX$qBM`(B>N!cB!w;>AmMz6=l0LNxx-@l z1mg2j_+j5duPdh_c-Dy0mA_p{hT{Ja?81Fun9}63hk<(a**;1jn+jGP4 z^6m3SZ{}L?T~4~UKZ2B-rJLW7kZ!TYd9)5D$hLm>F{)uzqKtW|iGpD0W$S zW$YvqAu69d?U#QJdnKEv3mt2BNTd}r*zj%cnU`oDn+dkSiGwz7?JX;_y+Q(s4GL1B z+kJc!dFaUAr4TkE-r89A5r1)SKJYu;*sdSa5-xQeBNGq0z^D?D#24u;n!@0cwxN=i z&^WC@VfP6(Q!CTTMa|Hz_x8_EGCm4EcN(@w8FeR3)^wI#=8{PF)=l`y9$pjB`b0i^ zv_F@08Rr7az)Q6(*X`^thA8+ykSr%SufA89^I0Y3H1g&U2WSE_-wR-d zRwis7jn;i^7V#_13sZ~Wj(@eziHCoP8BeLMSf*_$%3icH=oAh9+H9=)0<=xA4zPH* zxT}_Ebf6ar5fyR!y4dvaotynWgSh4sjh)ye&z%4vOT_kQ%wE(XJ?HJ#C`l4VN?%3; zqXL7K!BdK;NL#1KKmly{q*Jsjn*n z;=y!$exo1iQtBU{t|M`(#Vc9o35{fSXfNPM@)?s>sf?;JsLC9j%mI4qCeuf2$K9|~ z#|vv7B@=z<6HxU8TqH<(JO1452(FP>_A2;RgAIsicKbg9lN)U0OvrKi{WQF@<1cYU zqRc2vl{%KrMj|=)j(Dfv-{#-t)>NyLQb7D%)aj-gr}#I0{{V~!on)KFG;FzyA<~u3?PL9}*IJ3(hrS#t}x^Y(_ zp2}*CLZthIKyPxTI$XBIJ4CVHG~cIB%SW^&hD_J05oJd*nDfngl$$(=w)woZObA9& zi3YUX*safwa!NT5VTd4;d;C|=Zn^^XfCi0Xg8u+ID^Tb2+x?-lAPjR~k3i9}7iBs- zfVIvUzd_|y>v62{XT*#-P8xVwC+4QBu&ZR5prS9`>FL%xEcf+|+Jyqe;#CHXP_I;} zG6F(Sr_^K4Nu}XU1$mAdK0~|TYPnwT|4gw$shKK zyssmh700g1rNv$eTs@f#?e>{^q>Z|K)*5BfQ06#4ADMCU9V-+M9hls!C@u_JR^=C? 
z-LaXOAfXQ_OOkJ~4Ae)&Hl&JMm)`E%v&&I))Aq~7rWPT3f-!A=9p@paa(O2hkHPxz z!b_N@4jA&)bKHGhbbP5J>J(i~Lq~{HxUnh;H9X^1f@DI%m@*1eL^0HPoYv6VZX-nO?omGPp~~TgSf9s+(;7!P>g3Hwn5qrq^y?&-+XXyKl1BCcc(w<8I@BB z*}rId3)fYPNQDWH67=fRB*=8`BW2MKKg~7KRm-;ypX0PdrqYZ~--2}L!5ln0STf+K za>WHZcKB_hHTG$cp7C+Yf9R~}7YqhuL#ae|JJ8(NiQqKFL*wvWD;9gpwU!?ie$f<~ zgkNUGp8@FeZFgOvs*&b0VIh*ke{}uCP+#HJjQWU@8OvmDOX8Ct>~DR2+)IfiUNoIABCV zy%)cx`F-sYnslg4fR7J_#D~kh6?XpsE}G36%l`~t80->=H82VH2)St4h>?FSvi$bZZpK>R(8_v^T9E;u4-Xd>UXDGZf?}9{doTI4tXheX z18IyF%`S$-=~cWLdOR^K*OHhyzoVw!uie$zoU1W(hh@@yAF}O`hvOoVM{MN>V7~B@ zD50ugn{Tpwb(MBQYmhy|z8KN$*B&ZeHU{oA!*X0?1PGnjo80m9RL?KNN=q(RJe3J+ zcAa`vT870^u>r<8T%JUE1OpO6psQTOL|-q(eMA~G^*Inxn~*JkS+=c3=(4MH9!&UR z*qoU0-sa_)?e4{KaD_FWO)T-gpGid2?T;4wmo?>hc&W20QR36>_uORjO!E;CNTr5!vZXsY(_V33CU0g$TQsA2Iv7lFN-cz~y-K z<2Cr{R-nSANYrrUPGq-&-_ER?cOA^;Yxu^ zqSYWA8DB{+<+vHCZPUztzwj>T-Vkp2wF@nhX~r>eaxWC^n~KqeZM=5{h|PR`p8LD+ zOMP}k2cO?1j^6Gdp5N;A*F`T7`6KA^gJSI$inbgIJ!<8vT}H4er1R1r=8KH9(<9zN zf+oY$(!F)2vY;!DOyfV>BZ1eGvE86uRf24q<+qGSgk3{kn~I_*O%?`#T6LJ>{{Xau zkZ02B#UK9wFx^TdSmTO8WtoLB&>pYx3Kx2#EZ{HXrRC1 zOA^9r^$zH#R|Sfc^N}`Cw(ty$%b6W~lYdfl-?z_NYRP7r0`3GHv*3hs@%)^Jxu=(#6Qh77IHC(S@hVXF32#D)?J6(Uzo2=U8itOD~we8cA5e%hieRvGqmx? 
zZ>EH-=b1$sw5bl0i~u~@z;Wl}*}8qw`^l=ZJ(QO9IH8)Re=;KR)gjTt(6Uax9xgmO}CjNLQ(?V7}Q>Q0h zm1;#6EDJ#1g42TL5d0&)`~Y{v)jwuGzPiD+ex(`whYnfvZQ&E| zE@F`VH8Z4|ca*s#A0w(dmxd7j9XN}4LeMl^?WkzUhN|*eKIf8 zKU^1OE(8*RNC?qJL_&}yAo3A0b8U@x?(+2(h0i0GWyVh1zpb0T8;wu8y^eA*GNye8R`6^9(>sxOHouKH6;}OV5xWWboFl7Kcgw7q5FlqEh*8Rh-SCU&Z#Y%KG#BpWEL$pH_4faj;9kohr*}LfH;wj-lIl|`kjN;K4Q zR)bc0uH=ZZ=;24HRyl)YJpjowbd^=nPz+Ry=5t*R-NRKJnB+lJ-n3S65x5>5 ziz?o$L1EL4QaO%`Z8k3htVDd*X4S-1>*vu|S{vVRRc`dje$VYQcQ4vlH^*%@?EWL9 zsdPJj1sts+hRs^zrm*ELCb*v$7@%H_PRTLcm0HlJHSTY=J zYV7!VLPRVJI0zSZ>PiTNM5VvOTJcK$nM_|$3o90$7y%|5FNl^gziJ{o{14!tU1_eM z2~^RjN4TT2%?|-68})#hV~UrjwMYC5OvI=SPiVG{Cg_6oW|45ii&u*2Jp3^OXh;Vy z*{ZMYG497?`e?nx#ztdl?kPo;YrRdbTE5Vg70jcSW-JIO@6FtM{a?#gw4%f>(qO!B z*{pzK$3GCn^Te{;+S|<#z8mNa#apb&w&BtSJfV2r7V_-pW=O=Fv6 zmZd9*O;@9_YICTpHyOfT3lI#1HY99SRWeMxl)vGt#S%!FBqh!?TRc1Bu28B`li zyrW5`r)UR6h6(qDm1qEgPZ!?L76aLJ;Q z&&hC`YIz9r~fSEB9RrkZhl zgS0*!-51naT_N~%N`#Dw>8i};LTG@Ppl!N}sEGaqKhH?4=}&x-j5N6rTd7;N-FoJc zUbaF5&^&1is3(CG&G*0&&Y17qn2B$uV4i$F@zhc!a)CkNy}wkD+%~CG>Om=o+noG~ z0@1*!5Em?uZTkq8*R)q?3}o&IpQe^c7kwPZ(nn@9;EDcO-ZCd@eC22QtPcTxwX2|v{HA-Ayd%^O`Xg41LY?se&yLH>I z*I!$KbagtmpFGQr^!@@YTQ;QA&D5%sBT~XXB*HoGd4eUnY}%spHvF|lq2dPk#Ha0> z&tBBKY7Bv5G3GqU2=^oeoFV~FFE5i{KHr~X9z9BagRR{kY7(I_$sDkeA?^y2@18D;7k0 z!!(KZ^VIr-%nQVC0Y?lFeSEb{mo2u`%JG*2$EDWkbqpj^P8@-4!D)ds%alvgH2v?6 z`nAa+MhA_u-9?wARo*xz-)b`61Rq|fzRCXGNShyQt)`h$=``lWh!1n5cD(b%84!|m zK>$_T{B;?!0Xqr0wJp~krP&to%ydM2TkQLY>~HaK)ezm^_jR9Xw{Qe#G76PpEOR+ku3Jti$)ipWTlCBYZ@Za~18Y5miG5ZaGc z#5OVYZ|T-OWa&?-8-QiWsTe++7lyX|SgO^!P)?Li@=fTcgYr*d+jVc{6igEjhLKw7 zyD_C@=aRB5Bxf4Qt75a#i;~PBzKB^mfZF!+yklGG$eFRNMM$V1D2T zn6wRAgLc|3c&O0pRXNNIQk@1SGaDuAfyZr8+_p}J84u&E?t93q>IrfEmvY~f*_O=_ znmx5ilKU|!vLm`LxbGxeY~67HQMWS>Pt*I_ysN{&{{TMHODcL}WtT;{skJJ!sqv)K zY1OA0$^(Jo9U?N>K^J^+2QyJzvcR%GPJvbA`G#U-jRm`cpG$#uQ|_n}9%NZ{;f*p; zx+RfxS3}gkmF1wP=DIyKOVjqnCHIXv1J-@~F`Tp~Z-W#FgN5}sFXuB4NTDlqM+l>23u2xN(AVG(6?}Bg`5PxC^ZX0^s``0tkdxcv!^`vEE*4f 
z#wLO!B%dPwdgnKVT-6=EkNo%@j^KxP(cc`MWW((f_9wgeO;o5}7A+j=)=P{zIFYmF zzf0-4frjL+G`^&Ccm40KxVq|WuaZA$+(yY7V6)~k(uDr(je<7!piUdfqJ>QZ^4B-ptl_PG4#x}g_czGc+z zAu_y%oi?rQOi1Li@wn)hY2+d)@ceiDHC2NdNli*icG@!oom8jk+0&bu%?HUYU8CGyCeHPRId~68ytuk z1R^5&1wNbk=`}Qfv1emVtpua*W=NB)lf&p-ld1~&?f(GpNY*bvHePe{H+8!flNKvL z5}JAOa#K{$F_a>CHiErVy?s4LZDchzO-Tdb#+riNQj_gUl{P#o-?3m;9CA=no|eu` z7$zuhzN#aRJNJ*CtxTz7nCjU3VttqPkd;HJNRKv^8gzPpGMdY=%pWowi8>mn?u*xL zYs*IV4StM`6wEVAPVJ0&}cJU_2f zYu5ogHUu&jV{@PZNJ8hxWR0{11XV7my`+jLnzKvDTRX@wL8Q{^){Sc3tJ~A*;LGjK zhAhq#F*L~(Q2Vi7xh`LlXb&%`<|Om-5nd=(q0z4DwO)hc8>e$yBL}&GA+Z$O4!w@! ztr#zpOax32;jO{L_MqGsrMb>1V;VCvnR(MADefO*+lnXeuieCSD(I(@$m|Z%W9@`4 z3!?D0q*Gl0WNtAQJyrF$*LpC16{Fh#He_hyTNB6oFN*XGkv#DrDJ zrY+Pf)VnFcx-BY~j($9xYYeh&x=t{DMxK1;{e9_=Eqz<2o2I@^I)Dad*Mhb8ezTj=PvBC$Z$Hf9*bb zF8ZR9bfcq(``hFH0MTZ(_`O4$P-fSvOa|YW$rUw;j<#>)0$lc72GeHIQ@BBXABM1+ zh2uVrL%dR;g5&c&6n@0QQsdG5y<(EaC#8eY?H9nzb=^vn2uc6^9ChU?jClHa^$$i zWKf{S#g>Pd^KsNqBQZqlpG^u=-gwP>(vKpCJJ}}=n-*Y^Hlqy8!bp!)A9!VtPvOyH z)I#iKYSfu^#~rUxIVT))WEnLeA^FJn>D#L^i%Wl|H$l89F34^jFEX1bxge~8a+@Mf zneM66Q@5xC1BtcDY{n-=oBBfZi1U~tU>EsAYAyzLfEIg@@`Do7-B%{KXQBJ0!g($$Q_ z^O4>lUJ>|1bW1j&Ek?6)lZ{6{tw&*zFP$<{GGC`e((%*1da|>4D*ivTR(5AwTksa< zNR@kV;=NwF$*FQE(Urytp;na7nkb4}F3W-@!O?b4J@mwJ$4@$7+2AhRy&{(fMwK=k zik)UfM4OD;pi$Tsk+MWiy$Dqc@Kr@!X;*S|gq}$gr)gcVDKl(VrPVFz=u##$M+l{d zH;^jQrscZ#817iFBB!!iPU#2})Ro37a+b=^L1EiV+3EAnjy0%Fp)>2_3U zRHJxkHN`<;qGU!xLPmm!Xoay7B9dp8`!AN8StNJn+Fg12!jJIPi8loZPj`%hXr2L=-3rn z?y(N&uT|+OReD^YVwn|}V-rxBK_?qU-A83q$94FjZ><=WN-R>})FW12;U&XswR1af z+;H1$&OE5GXSM(`1Q#-IVNTWU*?UXX=cb^FU9Y6y^D8GOKy&QPPnTfQYn4dViB3hC z3Ab2i3j%3bkmwRF-J~c;N-4J9UY*r7)oxw!3^F~@`2PS+pX_u00BUF@sevqOf3Dy6 zDpljUuKR+F`dsTmrPP>h*o;%+HfF?t*cDApHz8exdq{k>^+bwP>0{){RY$S-c8lBj zliRC`)eai*?um3?QY6*}g3F|739xLQRc*vx)l@|2tMOfBZwj4>w$)!}fz|z-+S6)F z?Q4zO*Z%;BgqoEmp>JCpHH{c2FzhjUbe?ho5ULuhNGK<|DRfo%>m8(Hwz+^mXKKYd z;ixZz2~@h{EsJo;osJ?Z8mR+-s*0dwpi8$=UGefAeEz$U`3<^eYYnb2(Wo?9gh}*R za%ocQSq(-sSsTV`Kso~Iv$&xsmgq^kCM)Ef6G^B^IXVm~)?^nVz^Sk*uT%{fZIW{o 
zLUg+uCw3yhe8{#_HS<){H!vY0=VG-)yPmRHb~JY(JCo$vtBsLu=88y!U(an84t}y3 z+f=!enOdk+6h-N*GZnG~*$Cic9k(12j`cMA torch.Tensor: + """Convert the image data type to the Tensor (NCWH) data type supported by PyTorch + + Args: + image (np.ndarray): The image data read by ``OpenCV.imread``, the data range is [0,255] or [0, 1] + range_norm (bool): Scale [0, 1] data to between [-1, 1] + half (bool): Whether to convert torch.float32 similarly to torch.half type + + Returns: + tensor (torch.Tensor): Data types supported by PyTorch + + Examples: + >>> example_image = cv2.imread("example_image.bmp") + >>> example_tensor = image_to_tensor(example_image, False, False) + + """ + # Convert image data type to Tensor data type + tensor = F_vision.to_tensor(image) + + # Scale the image data from [0, 1] to [-1, 1] + if range_norm: + tensor = tensor.mul(2.0).sub(1.0) + + # Convert torch.float32 image data type to torch.half image data type + if half: + tensor = tensor.half() + + return tensor + + +def tensor_to_image(tensor: torch.Tensor, range_norm: bool, half: bool) -> Any: + """Convert the Tensor(NCWH) data type supported by PyTorch to the np.ndarray(WHC) image data type + + Args: + tensor (torch.Tensor): Data types supported by PyTorch (NCHW), the data range is [0, 1] + range_norm (bool): Scale [-1, 1] data to between [0, 1] + half (bool): Whether to convert torch.float32 similarly to torch.half type. 
+ + Returns: + image (np.ndarray): Data types supported by PIL or OpenCV + + Examples: + >>> example_tensor = torch.randn([1,3, 256, 256], dtype=torch.float) + >>> example_image = tensor_to_image(example_tensor, False, False) + + """ + # Scale the image data from [-1, 1] to [0, 1] + if range_norm: + tensor = tensor.add(1.0).div(2.0) + + # Convert torch.float32 image data type to torch.half image data type + if half: + tensor = tensor.half() + + image = tensor.squeeze(0).permute(1, 2, 0).mul(255).clamp(0, 255).cpu().numpy().astype("uint8") + + return image + + +def center_crop( + images: ndarray | Tensor | list[ndarray] | list[Tensor], + patch_size: int, +) -> [ndarray] or [Tensor] or [list[ndarray]] or [list[Tensor]]: + if not isinstance(images, list): + images = [images] + + # Detect input image data type + input_type = "Tensor" if torch.is_tensor(images[0]) else "Numpy" + + if input_type == "Tensor": + image_height, image_width = images[0].size()[-2:] + else: + image_height, image_width = images[0].shape[0:2] + + # Calculate the start indices of the crop + top = (image_height - patch_size) // 2 + left = (image_width - patch_size) // 2 + + # Crop lr image patch + if input_type == "Tensor": + images = [image[ + :, + :, + top:top + patch_size, + left:left + patch_size] for image in images] + else: + images = [image[ + top:top + patch_size, + left:left + patch_size, + ...] 
for image in images] + + # When image number is 1 + if len(images) == 1: + images = images[0] + + return images + + +def random_crop( + images: ndarray | Tensor | list[ndarray] | list[Tensor], + patch_size: int, +) -> [ndarray] or [Tensor] or [list[ndarray]] or [list[Tensor]]: + if not isinstance(images, list): + images = [images] + + # Detect input image data type + input_type = "Tensor" if torch.is_tensor(images[0]) else "Numpy" + + if input_type == "Tensor": + image_height, image_width = images[0].size()[-2:] + else: + image_height, image_width = images[0].shape[0:2] + + # Just need to find the top and left coordinates of the image + top = random.randint(0, image_height - patch_size) + left = random.randint(0, image_width - patch_size) + + # Crop lr image patch + if input_type == "Tensor": + images = [image[ + :, + :, + top:top + patch_size, + left:left + patch_size] for image in images] + else: + images = [image[ + top:top + patch_size, + left:left + patch_size, + ...] for image in images] + + # When image number is 1 + if len(images) == 1: + images = images[0] + + return images + + +def random_rotate( + images: ndarray | Tensor | list[ndarray] | list[Tensor], + angles: list, + center: tuple = None, + rotate_scale_factor: float = 1.0 +) -> [ndarray] or [Tensor] or [list[ndarray]] or [list[Tensor]]: + # Random select specific angle + angle = random.choice(angles) + + if not isinstance(images, list): + images = [images] + + # Detect input image data type + input_type = "Tensor" if torch.is_tensor(images[0]) else "Numpy" + + if input_type == "Tensor": + image_height, image_width = images[0].size()[-2:] + else: + image_height, image_width = images[0].shape[0:2] + + # Rotate LR image + if center is None: + center = (image_width // 2, image_height // 2) + + matrix = cv2.getRotationMatrix2D(center, angle, rotate_scale_factor) + + if input_type == "Tensor": + images = [F_vision.rotate(image, angle, center=center) for image in images] + else: + images = 
[cv2.warpAffine(image, matrix, (image_width, image_height)) for image in images] + + # When image number is 1 + if len(images) == 1: + images = images[0] + + return images + + +def random_horizontally_flip( + images: ndarray | Tensor | list[ndarray] | list[Tensor], + p: float = 0.5 +) -> [ndarray] or [Tensor] or [list[ndarray]] or [list[Tensor]]: + # Get horizontal flip probability + flip_prob = random.random() + + if not isinstance(images, list): + images = [images] + + # Detect input image data type + input_type = "Tensor" if torch.is_tensor(images[0]) else "Numpy" + + if flip_prob > p: + if input_type == "Tensor": + images = [F_vision.hflip(image) for image in images] + else: + images = [cv2.flip(image, 1) for image in images] + + # When image number is 1 + if len(images) == 1: + images = images[0] + + return images + + +def random_vertically_flip( + images: ndarray | Tensor | list[ndarray] | list[Tensor], + p: float = 0.5 +) -> [ndarray] or [Tensor] or [list[ndarray]] or [list[Tensor]]: + # Get vertical flip probability + flip_prob = random.random() + + if not isinstance(images, list): + images = [images] + + # Detect input image data type + input_type = "Tensor" if torch.is_tensor(images[0]) else "Numpy" + + if flip_prob > p: + if input_type == "Tensor": + images = [F_vision.vflip(image) for image in images] + else: + images = [cv2.flip(image, 0) for image in images] + + # When image number is 1 + if len(images) == 1: + images = images[0] + + return images diff --git a/PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/inference.py b/PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/inference.py new file mode 100644 index 000000000..bcf39858f --- /dev/null +++ b/PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/inference.py @@ -0,0 +1,125 @@ +# Copyright 2022 Dakewe Biotech Corporation. All Rights Reserved. 
+# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +import argparse +import json +import os + +import cv2 +import torch +from PIL import Image +from torch import nn +from torchvision.transforms import Resize, ConvertImageDtype, Normalize + +import imgproc +import model +from utils import load_state_dict + +model_names = sorted( + name for name in model.__dict__ if name.islower() and not name.startswith("__") and callable(model.__dict__[name])) + + +def load_class_label(class_label_file: str, num_classes: int) -> list: + class_label = json.load(open(class_label_file)) + class_label_list = [class_label[str(i)] for i in range(num_classes)] + + return class_label_list + + +def choice_device(device_type: str) -> torch.device: + # Select model processing equipment type + if device_type == "cuda": + device = torch.device("cuda", 0) + else: + device = torch.device("cpu") + return device + + +def build_model(model_arch_name: str, model_num_classes: int, device: torch.device) -> [nn.Module, nn.Module]: + resnet_model = model.__dict__[model_arch_name](num_classes=model_num_classes) + resnet_model = resnet_model.to(device=device, memory_format=torch.channels_last) + + return resnet_model + + +def preprocess_image(image_path: str, image_size: int, device: torch.device) -> torch.Tensor: + image = cv2.imread(image_path) + + # BGR to RGB + image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB) + + # OpenCV convert PIL 
+ image = Image.fromarray(image) + + # Resize to 224 + image = Resize([image_size, image_size])(image) + # Convert image data to pytorch format data + tensor = imgproc.image_to_tensor(image, False, False).unsqueeze_(0) + # Convert a tensor image to the given ``dtype`` and scale the values accordingly + tensor = ConvertImageDtype(torch.float)(tensor) + # Normalize a tensor image with mean and standard deviation. + tensor = Normalize(args.model_mean_parameters, args.model_std_parameters)(tensor) + + # Transfer tensor channel image format data to CUDA device + tensor = tensor.to(device=device, memory_format=torch.channels_last, non_blocking=True) + + return tensor + + +def main(): + # Get the label name corresponding to the drawing + class_label_map = load_class_label(args.class_label_file, args.model_num_classes) + + device = choice_device(args.device_type) + + # Initialize the model + resnet_model = build_model(args.model_arch_name, args.model_num_classes, device) + print(f"Build `{args.model_arch_name}` model successfully.") + + # Load model weights + resnet_model, _, _, _, _, _ = load_state_dict(resnet_model, args.model_weights_path) + print(f"Load `{args.model_arch_name}` model weights `{os.path.abspath(args.model_weights_path)}` successfully.") + + # Start the verification mode of the model. 
+ resnet_model.eval() + + tensor = preprocess_image(args.image_path, args.image_size, device) + + # Inference + with torch.no_grad(): + output = resnet_model(tensor) + + # Calculate the five categories with the highest classification probability + prediction_class_index = torch.topk(output, k=5).indices.squeeze(0).tolist() + + # Print classification results + for class_index in prediction_class_index: + prediction_class_label = class_label_map[class_index] + prediction_class_prob = torch.softmax(output, dim=1)[0, class_index].item() + print(f"{prediction_class_label:<75} ({prediction_class_prob * 100:.2f}%)") + + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument("--model_arch_name", type=str, default="resnet18") + parser.add_argument("--model_mean_parameters", type=list, default=[0.485, 0.456, 0.406]) + parser.add_argument("--model_std_parameters", type=list, default=[0.229, 0.224, 0.225]) + parser.add_argument("--class_label_file", type=str, default="./data/ImageNet_1K_labels_map.txt") + parser.add_argument("--model_num_classes", type=int, default=1000) + parser.add_argument("--model_weights_path", type=str, default="./results/pretrained_models/ResNet18-ImageNet_1K-57bb63e.pth.tar") + parser.add_argument("--image_path", type=str, default="./figure/n01440764_36.JPEG") + parser.add_argument("--image_size", type=int, default=224) + parser.add_argument("--device_type", type=str, default="cpu", choices=["cpu", "cuda"]) + args = parser.parse_args() + + main() diff --git a/PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/model.py b/PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/model.py new file mode 100644 index 000000000..076c2ffbf --- /dev/null +++ b/PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/model.py @@ -0,0 +1,252 @@ +# Copyright 2022 Dakewe Biotech Corporation. All Rights Reserved. 
+# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +from typing import Any, List, Type, Union, Optional + +import torch +from torch import Tensor +from torch import nn + +__all__ = [ + "ResNet", + "resnet18", +] + + +class _BasicBlock(nn.Module): + expansion: int = 1 + + def __init__( + self, + in_channels: int, + out_channels: int, + stride: int, + downsample: Optional[nn.Module] = None, + groups: int = 1, + base_channels: int = 64, + ) -> None: + super(_BasicBlock, self).__init__() + self.stride = stride + self.downsample = downsample + self.groups = groups + self.base_channels = base_channels + + self.conv1 = nn.Conv2d(in_channels, out_channels, (3, 3), (stride, stride), (1, 1), bias=False) + self.bn1 = nn.BatchNorm2d(out_channels) + self.relu = nn.ReLU(True) + self.conv2 = nn.Conv2d(out_channels, out_channels, (3, 3), (1, 1), (1, 1), bias=False) + self.bn2 = nn.BatchNorm2d(out_channels) + + def forward(self, x: Tensor) -> Tensor: + identity = x + + out = self.conv1(x) + out = self.bn1(out) + out = self.relu(out) + + out = self.conv2(out) + out = self.bn2(out) + + if self.downsample is not None: + identity = self.downsample(x) + + out = torch.add(out, identity) + out = self.relu(out) + + return out + + +class _Bottleneck(nn.Module): + expansion: int = 4 + + def __init__( + self, + in_channels: int, + out_channels: int, + stride: int, + downsample: Optional[nn.Module] = None, + groups: int = 
1, + base_channels: int = 64, + ) -> None: + super(_Bottleneck, self).__init__() + self.stride = stride + self.downsample = downsample + self.groups = groups + self.base_channels = base_channels + + channels = int(out_channels * (base_channels / 64.0)) * groups + + self.conv1 = nn.Conv2d(in_channels, channels, (1, 1), (1, 1), (0, 0), bias=False) + self.bn1 = nn.BatchNorm2d(channels) + self.conv2 = nn.Conv2d(channels, channels, (3, 3), (stride, stride), (1, 1), groups=groups, bias=False) + self.bn2 = nn.BatchNorm2d(channels) + self.conv3 = nn.Conv2d(channels, int(out_channels * self.expansion), (1, 1), (1, 1), (0, 0), bias=False) + self.bn3 = nn.BatchNorm2d(int(out_channels * self.expansion)) + self.relu = nn.ReLU(True) + + def forward(self, x: Tensor) -> Tensor: + identity = x + + out = self.conv1(x) + out = self.bn1(out) + out = self.relu(out) + + out = self.conv2(out) + out = self.bn2(out) + out = self.relu(out) + + out = self.conv3(out) + out = self.bn3(out) + + if self.downsample is not None: + identity = self.downsample(x) + + out = torch.add(out, identity) + out = self.relu(out) + + return out + + +class ResNet(nn.Module): + + def __init__( + self, + arch_cfg: List[int], + block: Type[Union[_BasicBlock, _Bottleneck]], + groups: int = 1, + channels_per_group: int = 64, + num_classes: int = 1000, + ) -> None: + super(ResNet, self).__init__() + self.in_channels = 64 + self.dilation = 1 + self.groups = groups + self.base_channels = channels_per_group + + self.conv1 = nn.Conv2d(3, self.in_channels, (7, 7), (2, 2), (3, 3), bias=False) + self.bn1 = nn.BatchNorm2d(self.in_channels) + self.relu = nn.ReLU(True) + self.maxpool = nn.MaxPool2d((3, 3), (2, 2), (1, 1)) + + self.layer1 = self._make_layer(arch_cfg[0], block, 64, 1) + self.layer2 = self._make_layer(arch_cfg[1], block, 128, 2) + self.layer3 = self._make_layer(arch_cfg[2], block, 256, 2) + self.layer4 = self._make_layer(arch_cfg[3], block, 512, 2) + + self.avgpool = nn.AdaptiveAvgPool2d((1, 1)) + + self.fc = 
nn.Linear(512 * block.expansion, num_classes) + + # Initialize neural network weights + self._initialize_weights() + + def _make_layer( + self, + repeat_times: int, + block: Type[Union[_BasicBlock, _Bottleneck]], + channels: int, + stride: int = 1, + ) -> nn.Sequential: + downsample = None + + if stride != 1 or self.in_channels != channels * block.expansion: + downsample = nn.Sequential( + nn.Conv2d(self.in_channels, channels * block.expansion, (1, 1), (stride, stride), (0, 0), bias=False), + nn.BatchNorm2d(channels * block.expansion), + ) + + layers = [ + block( + self.in_channels, + channels, + stride, + downsample, + self.groups, + self.base_channels + ) + ] + self.in_channels = channels * block.expansion + for _ in range(1, repeat_times): + layers.append( + block( + self.in_channels, + channels, + 1, + None, + self.groups, + self.base_channels, + ) + ) + + return nn.Sequential(*layers) + + def forward(self, x: Tensor) -> Tensor: + out = self._forward_impl(x) + + return out + + # Support torch.script function + def _forward_impl(self, x: Tensor) -> Tensor: + out = self.conv1(x) + out = self.bn1(out) + out = self.relu(out) + out = self.maxpool(out) + + out = self.layer1(out) + out = self.layer2(out) + out = self.layer3(out) + out = self.layer4(out) + + out = self.avgpool(out) + out = torch.flatten(out, 1) + out = self.fc(out) + + return out + + def _initialize_weights(self) -> None: + for module in self.modules(): + if isinstance(module, nn.Conv2d): + nn.init.kaiming_normal_(module.weight, mode="fan_out", nonlinearity="relu") + elif isinstance(module, (nn.BatchNorm2d, nn.GroupNorm)): + nn.init.constant_(module.weight, 1) + nn.init.constant_(module.bias, 0) + + +def resnet18(**kwargs: Any) -> ResNet: + model = ResNet([2, 2, 2, 2], _BasicBlock, **kwargs) + + return model + + +def resnet34(**kwargs: Any) -> ResNet: + model = ResNet([3, 4, 6, 3], _BasicBlock, **kwargs) + + return model + + +def resnet50(**kwargs: Any) -> ResNet: + model = ResNet([3, 4, 6, 3], 
_Bottleneck, **kwargs) + + return model + + +def resnet101(**kwargs: Any) -> ResNet: + model = ResNet([3, 4, 23, 3], _Bottleneck, **kwargs) + + return model + + +def resnet152(**kwargs: Any) -> ResNet: + model = ResNet([3, 8, 36, 3], _Bottleneck, **kwargs) + + return model diff --git a/PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/requirements.txt b/PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/requirements.txt new file mode 100644 index 000000000..c334c0188 --- /dev/null +++ b/PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/requirements.txt @@ -0,0 +1,5 @@ +Pillow==10.3.0 +torch==2.7.1 +torchvision==0.13.1+cu116 +numpy==1.23.1 +opencv-python==4.8.1.78 \ No newline at end of file diff --git a/PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/scripts/preprocess_imagenet.sh b/PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/scripts/preprocess_imagenet.sh new file mode 100644 index 000000000..5f04f9212 --- /dev/null +++ b/PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/scripts/preprocess_imagenet.sh @@ -0,0 +1,34 @@ +# Copyright 2022 Dakewe Biotech Corporation. All Rights Reserved. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
# ==============================================================================

# Process ImageNet_1K train dataset.
# Guard every cd: if it fails, the tar/rm commands below would otherwise run in
# the wrong directory (the old `shellcheck disable=SC2164` only hid this).
cd ../data/ImageNet_1K/ILSVRC2012_img_train || exit 1
tar -xvf ILSVRC2012_img_train.tar
rm ILSVRC2012_img_train.tar
# Unpack each per-class tar into its own directory, then delete the tar.
# `read -r` prevents backslash mangling in file names (SC2162).
find . -name "*.tar" | while read -r NAME ; do mkdir -p "${NAME%.tar}"; tar -xvf "${NAME}" -C "${NAME%.tar}"; rm -f "${NAME}"; done
cd ../../scripts || exit 1

# Process ImageNet_1K valid dataset: unpack, then let valprep.sh sort images
# into per-class folders before removing the flat JPEGs and helper script.
cd ../data/ImageNet_1K/ILSVRC2012_img_val || exit 1
tar -xvf ILSVRC2012_img_val.tar
bash valprep.sh
rm ./*.JPEG
rm ./*.sh
cd ../../scripts || exit 1
# ==============================================================================
"""Sort the flat mini-ImageNet image dump into train/valid/test class folders.

Reads the three official split CSV files (mapping image file name -> class
label) and saves every image under ``<output>/<split>/<label>/<name>``.
"""
import csv
import os

from PIL import Image

train_csv_path = "../data/MiniImageNet_1K/original/train.csv"
valid_csv_path = "../data/MiniImageNet_1K/original/valid.csv"
test_csv_path = "../data/MiniImageNet_1K/original/test.csv"

inputs_images_dir = "../data/MiniImageNet_1K/original/mini_imagenet/images"
output_images_dir = "../data/MiniImageNet_1K/"


def _read_label_map(csv_path):
    """Return {image_file_name: class_label} from one split CSV (header skipped)."""
    label_map = {}
    with open(csv_path) as csvfile:
        csv_reader = csv.reader(csvfile)
        next(csv_reader)  # skip the header row
        for row in csv_reader:
            label_map[row[0]] = row[1]
    return label_map


train_label = _read_label_map(train_csv_path)
val_label = _read_label_map(valid_csv_path)
test_label = _read_label_map(test_csv_path)

# Each image belongs to exactly one split; save it under <split>/<label>/.
_splits = (("train", train_label), ("valid", val_label), ("test", test_label))
for png in os.listdir(inputs_images_dir):
    path = inputs_images_dir + "/" + png
    im = Image.open(path)
    for _split_name, _label_map in _splits:
        if png in _label_map:
            temp_path = output_images_dir + "/" + _split_name + "/" + _label_map[png]
            # exist_ok avoids the exists()/makedirs() race of the original code.
            os.makedirs(temp_path, exist_ok=True)
            im.save(temp_path + "/" + png)
            break
# Copyright 2022 Dakewe Biotech Corporation. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Evaluate a trained checkpoint on the test split and print Top-1/Top-5 error."""
import os
import time

import torch
from torch import nn
from torch.utils.data import DataLoader

import config
import model
from dataset import CUDAPrefetcher, ImageDataset
from utils import load_state_dict, accuracy, Summary, AverageMeter, ProgressMeter

model_names = sorted(
    name for name in model.__dict__ if name.islower() and not name.startswith("__") and callable(model.__dict__[name]))


def build_model() -> nn.Module:
    """Build the architecture selected in `config` and move it to the target device."""
    resnet_model = model.__dict__[config.model_arch_name](num_classes=config.model_num_classes)
    # channels_last layout improves convolution throughput on GPU-class devices.
    resnet_model = resnet_model.to(device=config.device, memory_format=torch.channels_last)

    return resnet_model


def load_dataset() -> CUDAPrefetcher:
    """Wrap the test split in a DataLoader and a device-side prefetcher."""
    test_dataset = ImageDataset(config.test_image_dir,
                                config.image_size,
                                config.model_mean_parameters,
                                config.model_std_parameters,
                                "Test")
    test_dataloader = DataLoader(test_dataset,
                                 batch_size=config.batch_size,
                                 shuffle=False,
                                 num_workers=config.num_workers,
                                 pin_memory=True,
                                 drop_last=False,
                                 persistent_workers=True)

    # Place all data on the preprocessing data loader
    test_prefetcher = CUDAPrefetcher(test_dataloader, config.device)

    return test_prefetcher


def main() -> None:
    """Load weights, run inference over the whole test set, report error rates."""
    # Initialize the model
    resnet_model = build_model()
    print(f"Build `{config.model_arch_name}` model successfully.")

    # Load model weights
    resnet_model, _, _, _, _, _ = load_state_dict(resnet_model, config.model_weights_path)
    print(f"Load `{config.model_arch_name}` "
          f"model weights `{os.path.abspath(config.model_weights_path)}` successfully.")

    # Start the verification mode of the model.
    resnet_model.eval()

    # Load test dataloader
    test_prefetcher = load_dataset()

    # Calculate how many batches of data are in each Epoch
    batches = len(test_prefetcher)
    batch_time = AverageMeter("Time", ":6.3f", Summary.NONE)
    acc1 = AverageMeter("Acc@1", ":6.2f", Summary.AVERAGE)
    acc5 = AverageMeter("Acc@5", ":6.2f", Summary.AVERAGE)
    # Plain string: the original used an f-string with no placeholders.
    progress = ProgressMeter(batches, [batch_time, acc1, acc5], prefix="Test: ")

    # Initialize the number of data batches to print logs on the terminal
    batch_index = 0

    # Initialize the data loader and load the first batch of data
    test_prefetcher.reset()
    batch_data = test_prefetcher.next()

    # Get the initialization test time
    end = time.time()

    with torch.no_grad():
        while batch_data is not None:
            # Transfer in-memory data to the compute device.  channels_last on
            # the images matches the model layout (consistent with train.py).
            images = batch_data["image"].to(device=config.device,
                                            memory_format=torch.channels_last,
                                            non_blocking=True)
            target = batch_data["target"].to(device=config.device, non_blocking=True)

            # Get batch size
            batch_size = images.size(0)

            # Inference
            output = resnet_model(images)

            # measure accuracy and record loss
            top1, top5 = accuracy(output, target, topk=(1, 5))
            acc1.update(top1[0].item(), batch_size)
            acc5.update(top5[0].item(), batch_size)

            # Calculate the time it takes to fully test a batch of data
            batch_time.update(time.time() - end)
            end = time.time()

            # Write the data during testing to the log file
            if batch_index % config.test_print_frequency == 0:
                progress.display(batch_index + 1)

            # Preload the next batch of data
            batch_data = test_prefetcher.next()

            # Add 1 to the number of data batches to ensure that the terminal prints data normally
            batch_index += 1

    # print metrics
    print(f"Acc@1 error: {100 - acc1.avg:.2f}%")
    print(f"Acc@5 error: {100 - acc5.avg:.2f}%")


if __name__ == "__main__":
    main()
# ==============================================================================
"""Train a ResNet classifier with AMP mixed precision, EMA weights and a
cosine-annealing warm-restart learning-rate schedule."""
import os
import time
from typing import Tuple

import torch
from torch import nn
from torch import optim
from torch.cuda import amp
from torch.optim import lr_scheduler
from torch.optim.swa_utils import AveragedModel
from torch.utils.data import DataLoader
from torch.utils.tensorboard import SummaryWriter

import config
import model
from dataset import CUDAPrefetcher, ImageDataset
from utils import accuracy, load_state_dict, make_directory, save_checkpoint, Summary, AverageMeter, ProgressMeter

model_names = sorted(
    name for name in model.__dict__ if name.islower() and not name.startswith("__") and callable(model.__dict__[name]))


def main():
    """Training entry point: build everything, then run the epoch loop."""
    # Initialize the number of training epochs
    start_epoch = 0

    # Best top-1 accuracy observed so far; drives "best" checkpoint selection
    best_acc1 = 0.0

    train_prefetcher, valid_prefetcher = load_dataset()
    print(f"Load `{config.model_arch_name}` datasets successfully.")

    resnet_model, ema_resnet_model = build_model()
    print(f"Build `{config.model_arch_name}` model successfully.")

    pixel_criterion = define_loss()
    print("Define all loss functions successfully.")

    optimizer = define_optimizer(resnet_model)
    print("Define all optimizer functions successfully.")

    scheduler = define_scheduler(optimizer)
    print("Define all optimizer scheduler functions successfully.")

    print("Check whether to load pretrained model weights...")
    if config.pretrained_model_weights_path:
        resnet_model, ema_resnet_model, start_epoch, best_acc1, optimizer, scheduler = load_state_dict(
            resnet_model,
            config.pretrained_model_weights_path,
            ema_resnet_model,
            start_epoch,
            best_acc1,
            optimizer,
            scheduler)
        print(f"Loaded `{config.pretrained_model_weights_path}` pretrained model weights successfully.")
    else:
        print("Pretrained model weights not found.")

    print("Check whether the pretrained model is restored...")
    if config.resume:
        # BUGFIX: resume must restore the checkpoint named by `config.resume`;
        # the original passed `config.pretrained_model_weights_path` here, so a
        # resume run reloaded the pretrained weights instead of the resume
        # checkpoint (and crashed when only `resume` was configured).
        resnet_model, ema_resnet_model, start_epoch, best_acc1, optimizer, scheduler = load_state_dict(
            resnet_model,
            config.resume,
            ema_resnet_model,
            start_epoch,
            best_acc1,
            optimizer,
            scheduler,
            "resume")
        print("Loaded pretrained generator model weights.")
    else:
        print("Resume training model not found. Start training from scratch.")

    # Create experiment result folders
    samples_dir = os.path.join("samples", config.exp_name)
    results_dir = os.path.join("results", config.exp_name)
    make_directory(samples_dir)
    make_directory(results_dir)

    # Create training process log file
    writer = SummaryWriter(os.path.join("samples", "logs", config.exp_name))

    # Initialize the gradient scaler
    scaler = amp.GradScaler()

    for epoch in range(start_epoch, config.epochs):
        train(resnet_model, ema_resnet_model, train_prefetcher, pixel_criterion, optimizer, epoch, scaler, writer)
        acc1 = validate(ema_resnet_model, valid_prefetcher, epoch, writer, "Valid")
        print("\n")

        # Update LR
        scheduler.step()

        # Automatically save the model with the highest index
        is_best = acc1 > best_acc1
        is_last = (epoch + 1) == config.epochs
        best_acc1 = max(acc1, best_acc1)
        save_checkpoint({"epoch": epoch + 1,
                         "best_acc1": best_acc1,
                         "state_dict": resnet_model.state_dict(),
                         "ema_state_dict": ema_resnet_model.state_dict(),
                         "optimizer": optimizer.state_dict(),
                         "scheduler": scheduler.state_dict()},
                        f"epoch_{epoch + 1}.pth.tar",
                        samples_dir,
                        results_dir,
                        is_best,
                        is_last)


def load_dataset() -> Tuple[CUDAPrefetcher, CUDAPrefetcher]:
    """Build train/valid datasets and wrap each in a device-side prefetcher."""
    train_dataset = ImageDataset(config.train_image_dir,
                                 config.image_size,
                                 config.model_mean_parameters,
                                 config.model_std_parameters,
                                 "Train")
    valid_dataset = ImageDataset(config.valid_image_dir,
                                 config.image_size,
                                 config.model_mean_parameters,
                                 config.model_std_parameters,
                                 "Valid")

    # Generator all dataloader
    train_dataloader = DataLoader(train_dataset,
                                  batch_size=config.batch_size,
                                  shuffle=True,
                                  num_workers=config.num_workers,
                                  pin_memory=True,
                                  drop_last=True,
                                  persistent_workers=True)
    valid_dataloader = DataLoader(valid_dataset,
                                  batch_size=config.batch_size,
                                  shuffle=False,
                                  num_workers=config.num_workers,
                                  pin_memory=True,
                                  drop_last=False,
                                  persistent_workers=True)

    # Place all data on the preprocessing data loader
    train_prefetcher = CUDAPrefetcher(train_dataloader, config.device)
    valid_prefetcher = CUDAPrefetcher(valid_dataloader, config.device)

    return train_prefetcher, valid_prefetcher


def build_model() -> Tuple[nn.Module, nn.Module]:
    """Build the base model and its EMA (exponential moving average) shadow."""
    resnet_model = model.__dict__[config.model_arch_name](num_classes=config.model_num_classes)
    resnet_model = resnet_model.to(device=config.device, memory_format=torch.channels_last)

    # EMA update: new_avg = (1 - decay) * avg + decay * param
    ema_avg = lambda averaged_model_parameter, model_parameter, num_averaged: \
        (1 - config.model_ema_decay) * averaged_model_parameter + config.model_ema_decay * model_parameter
    ema_resnet_model = AveragedModel(resnet_model, avg_fn=ema_avg)

    return resnet_model, ema_resnet_model


def define_loss() -> nn.CrossEntropyLoss:
    """Cross-entropy with label smoothing, on the training device."""
    criterion = nn.CrossEntropyLoss(label_smoothing=config.loss_label_smoothing)
    criterion = criterion.to(device=config.device, memory_format=torch.channels_last)

    return criterion


def define_optimizer(model) -> optim.SGD:
    """SGD with momentum and weight decay from the config."""
    optimizer = optim.SGD(model.parameters(),
                          lr=config.model_lr,
                          momentum=config.model_momentum,
                          weight_decay=config.model_weight_decay)

    return optimizer


def define_scheduler(optimizer: optim.SGD) -> lr_scheduler.CosineAnnealingWarmRestarts:
    """Cosine annealing with warm restarts driven by the config T_0/T_mult."""
    scheduler = lr_scheduler.CosineAnnealingWarmRestarts(optimizer,
                                                         config.lr_scheduler_T_0,
                                                         config.lr_scheduler_T_mult,
                                                         config.lr_scheduler_eta_min)

    return scheduler


def train(
        model: nn.Module,
        ema_model: nn.Module,
        train_prefetcher: CUDAPrefetcher,
        criterion: nn.CrossEntropyLoss,
        optimizer: optim.SGD,  # was mis-annotated optim.Adam; define_optimizer builds SGD
        epoch: int,
        scaler: amp.GradScaler,
        writer: SummaryWriter
) -> None:
    """Run one training epoch with AMP and EMA parameter updates."""
    # Calculate how many batches of data are in each Epoch
    batches = len(train_prefetcher)
    # Print information of progress bar during training
    batch_time = AverageMeter("Time", ":6.3f")
    data_time = AverageMeter("Data", ":6.3f")
    losses = AverageMeter("Loss", ":6.6f")
    acc1 = AverageMeter("Acc@1", ":6.2f")
    acc5 = AverageMeter("Acc@5", ":6.2f")
    progress = ProgressMeter(batches,
                             [batch_time, data_time, losses, acc1, acc5],
                             prefix=f"Epoch: [{epoch + 1}]")

    # Put the generative network model in training mode
    model.train()

    # Initialize the number of data batches to print logs on the terminal
    batch_index = 0

    # Initialize the data loader and load the first batch of data
    train_prefetcher.reset()
    batch_data = train_prefetcher.next()

    # Get the initialization training time
    end = time.time()

    while batch_data is not None:
        # Calculate the time it takes to load a batch of data
        data_time.update(time.time() - end)

        # Transfer in-memory data to CUDA devices to speed up training
        images = batch_data["image"].to(device=config.device, memory_format=torch.channels_last, non_blocking=True)
        target = batch_data["target"].to(device=config.device, non_blocking=True)

        # Get batch size
        batch_size = images.size(0)

        # Initialize generator gradients
        model.zero_grad(set_to_none=True)

        # Mixed precision training
        with amp.autocast():
            output = model(images)
            loss = config.loss_weights * criterion(output, target)

        # Backpropagation
        scaler.scale(loss).backward()
        # update generator weights
        scaler.step(optimizer)
        scaler.update()

        # Update EMA
        ema_model.update_parameters(model)

        # measure accuracy and record loss
        top1, top5 = accuracy(output, target, topk=(1, 5))
        losses.update(loss.item(), batch_size)
        acc1.update(top1[0].item(), batch_size)
        acc5.update(top5[0].item(), batch_size)

        # Calculate the time it takes to fully train a batch of data
        batch_time.update(time.time() - end)
        end = time.time()

        # Write the data during training to the training log file
        if batch_index % config.train_print_frequency == 0:
            # Record loss during training and output to file
            writer.add_scalar("Train/Loss", loss.item(), batch_index + epoch * batches + 1)
            progress.display(batch_index + 1)

        # Preload the next batch of data
        batch_data = train_prefetcher.next()

        # Add 1 to the number of data batches to ensure that the terminal prints data normally
        batch_index += 1


def validate(
        ema_model: nn.Module,
        data_prefetcher: CUDAPrefetcher,
        epoch: int,
        writer: SummaryWriter,
        mode: str
) -> float:
    """Evaluate the EMA model on one split and return its average Top-1 accuracy."""
    # Calculate how many batches of data are in each Epoch
    batches = len(data_prefetcher)
    batch_time = AverageMeter("Time", ":6.3f", Summary.NONE)
    acc1 = AverageMeter("Acc@1", ":6.2f", Summary.AVERAGE)
    acc5 = AverageMeter("Acc@5", ":6.2f", Summary.AVERAGE)
    progress = ProgressMeter(batches, [batch_time, acc1, acc5], prefix=f"{mode}: ")

    # Put the exponential moving average model in the verification mode
    ema_model.eval()

    # Initialize the number of data batches to print logs on the terminal
    batch_index = 0

    # Initialize the data loader and load the first batch of data
    data_prefetcher.reset()
    batch_data = data_prefetcher.next()

    # Get the initialization test time
    end = time.time()

    with torch.no_grad():
        while batch_data is not None:
            # Transfer in-memory data to CUDA devices to speed up training
            images = batch_data["image"].to(device=config.device, memory_format=torch.channels_last, non_blocking=True)
            target = batch_data["target"].to(device=config.device, non_blocking=True)

            # Get batch size
            batch_size = images.size(0)

            # Inference
            output = ema_model(images)

            # measure accuracy and record loss
            top1, top5 = accuracy(output, target, topk=(1, 5))
            acc1.update(top1[0].item(), batch_size)
            acc5.update(top5[0].item(), batch_size)

            # Calculate the time it takes to fully train a batch of data
            batch_time.update(time.time() - end)
            end = time.time()

            # Write the data during training to the training log file
            if batch_index % config.valid_print_frequency == 0:
                progress.display(batch_index + 1)

            # Preload the next batch of data
            batch_data = data_prefetcher.next()

            # Add 1 to the number of data batches to ensure that the terminal prints data normally
            batch_index += 1

    # print metrics
    progress.display_summary()

    if mode == "Valid" or mode == "Test":
        writer.add_scalar(f"{mode}/Acc@1", acc1.avg, epoch + 1)
    else:
        raise ValueError("Unsupported mode, please use `Valid` or `Test`.")

    return acc1.avg


if __name__ == "__main__":
    main()
+# ============================================================================== +import os +import shutil +from enum import Enum +from typing import Any, Dict, TypeVar, Optional + +import torch +from torch import nn + +__all__ = [ + "accuracy", "load_state_dict", "make_directory", "ovewrite_named_param", "make_divisible", "save_checkpoint", + "Summary", "AverageMeter", "ProgressMeter" +] + +V = TypeVar("V") + + +def accuracy(output, target, topk=(1,)): + """Computes the accuracy over the k top predictions for the specified values of k""" + with torch.no_grad(): + maxk = max(topk) + batch_size = target.size(0) + + _, pred = output.topk(maxk, 1, True, True) + pred = pred.t() + correct = pred.eq(target.view(1, -1).expand_as(pred)) + + results = [] + for k in topk: + correct_k = correct[:k].reshape(-1).float().sum(0, keepdim=True) + results.append(correct_k.mul_(100.0 / batch_size)) + return results + + +def load_state_dict( + model: nn.Module, + model_weights_path: str, + ema_model: nn.Module = None, + start_epoch: int = None, + best_acc1: float = None, + optimizer: torch.optim.Optimizer = None, + scheduler: torch.optim.lr_scheduler = None, + load_mode: str = None, +) -> [nn.Module, nn.Module, str, int, float, torch.optim.Optimizer, torch.optim.lr_scheduler]: + # Load model weights + checkpoint = torch.load(model_weights_path, map_location=lambda storage, loc: storage) + + if load_mode == "resume": + # Restore the parameters in the training node to this point + start_epoch = checkpoint["epoch"] + best_acc1 = checkpoint["best_acc1"] + # Load model state dict. Extract the fitted model weights + model_state_dict = model.state_dict() + state_dict = {k: v for k, v in checkpoint["state_dict"].items() if k in model_state_dict.keys()} + # Overwrite the model weights to the current model (base model) + model_state_dict.update(state_dict) + model.load_state_dict(model_state_dict) + # Load ema model state dict. 
Extract the fitted model weights + ema_model_state_dict = ema_model.state_dict() + ema_state_dict = {k: v for k, v in checkpoint["ema_state_dict"].items() if k in ema_model_state_dict.keys()} + # Overwrite the model weights to the current model (ema model) + ema_model_state_dict.update(ema_state_dict) + ema_model.load_state_dict(ema_model_state_dict) + # Load the optimizer model + optimizer.load_state_dict(checkpoint["optimizer"]) + # Load the scheduler model + scheduler.load_state_dict(checkpoint["scheduler"]) + else: + # Load model state dict. Extract the fitted model weights + model_state_dict = model.state_dict() + state_dict = {k: v for k, v in checkpoint["state_dict"].items() if + k in model_state_dict.keys() and v.size() == model_state_dict[k].size()} + # Overwrite the model weights to the current model + model_state_dict.update(state_dict) + model.load_state_dict(model_state_dict) + + return model, ema_model, start_epoch, best_acc1, optimizer, scheduler + + +def make_directory(dir_path: str) -> None: + if not os.path.exists(dir_path): + os.makedirs(dir_path) + + +def ovewrite_named_param(kwargs: Dict[str, Any], param: str, new_value: V) -> None: + if param in kwargs: + if kwargs[param] != new_value: + raise ValueError(f"The parameter '{param}' expected value {new_value} but got {kwargs[param]} instead.") + else: + kwargs[param] = new_value + + +def make_divisible(v: float, divisor: int, min_value: Optional[int] = None) -> int: + """Copy from: https://github.com/tensorflow/models/blob/master/research/slim/nets/mobilenet/mobilenet.py + """ + if min_value is None: + min_value = divisor + new_v = max(min_value, int(v + divisor / 2) // divisor * divisor) + # Make sure that round down does not go down by more than 10%. 
+ if new_v < 0.9 * v: + new_v += divisor + return new_v + + +def save_checkpoint( + state_dict: dict, + file_name: str, + samples_dir: str, + results_dir: str, + is_best: bool = False, + is_last: bool = False, +) -> None: + checkpoint_path = os.path.join(samples_dir, file_name) + torch.save(state_dict, checkpoint_path) + + if is_best: + shutil.copyfile(checkpoint_path, os.path.join(results_dir, "best.pth.tar")) + if is_last: + shutil.copyfile(checkpoint_path, os.path.join(results_dir, "last.pth.tar")) + + +class Summary(Enum): + NONE = 0 + AVERAGE = 1 + SUM = 2 + COUNT = 3 + + +class AverageMeter(object): + def __init__(self, name, fmt=":f", summary_type=Summary.AVERAGE): + self.name = name + self.fmt = fmt + self.summary_type = summary_type + self.reset() + + def reset(self): + self.val = 0 + self.avg = 0 + self.sum = 0 + self.count = 0 + + def update(self, val, n=1): + self.val = val + self.sum += val * n + self.count += n + self.avg = self.sum / self.count + + def __str__(self): + fmtstr = "{name} {val" + self.fmt + "} ({avg" + self.fmt + "})" + return fmtstr.format(**self.__dict__) + + def summary(self): + if self.summary_type is Summary.NONE: + fmtstr = "" + elif self.summary_type is Summary.AVERAGE: + fmtstr = "{name} {avg:.2f}" + elif self.summary_type is Summary.SUM: + fmtstr = "{name} {sum:.2f}" + elif self.summary_type is Summary.COUNT: + fmtstr = "{name} {count:.2f}" + else: + raise ValueError(f"Invalid summary type {self.summary_type}") + + return fmtstr.format(**self.__dict__) + + +class ProgressMeter(object): + def __init__(self, num_batches, meters, prefix=""): + self.batch_fmtstr = self._get_batch_fmtstr(num_batches) + self.meters = meters + self.prefix = prefix + + def display(self, batch): + entries = [self.prefix + self.batch_fmtstr.format(batch)] + entries += [str(meter) for meter in self.meters] + print("\t".join(entries)) + + def display_summary(self): + entries = [" *"] + entries += [meter.summary() for meter in self.meters] + print(" 
".join(entries)) + + def _get_batch_fmtstr(self, num_batches): + num_digits = len(str(num_batches // 1)) + fmt = "{:" + str(num_digits) + "d}" + return "[" + fmt + "/" + fmt.format(num_batches) + "]" diff --git a/PyTorch/build-in/Classification/ResNetV1bV1_5/coverage.txt b/PyTorch/build-in/Classification/ResNetV1bV1_5/coverage.txt new file mode 100644 index 000000000..ba5c5dd79 --- /dev/null +++ b/PyTorch/build-in/Classification/ResNetV1bV1_5/coverage.txt @@ -0,0 +1,3 @@ +all api: ['_amp_foreach_non_finite_check_and_unscale_', '_amp_update_scale_', '_copy_from', '_has_compatible_shallow_copy_type', '_local_scalar_dense', '_log_softmax', '_log_softmax_backward_data', '_pin_memory', '_reshape_alias', 'add', 'add_', 'addmm', 'as_strided', 'as_strided_', 'convolution', 'convolution_backward', 'copy_stride', 'div', 'eq', 'fill_', 'fused_sgd', 'is_pinned', 'linear', 'max_pool2d', 'maxpool2d_backward', 'maxpool2d_forward', 'mean', 'mm', 'mul', 'mul_', 'native_batch_norm', 'native_batch_norm_backward', 'nll_loss_backward', 'nll_loss_forward', 'reciprocal', 'relu_', 'set_', 'sum', 'threshold_backward', 'topk_out', 'view', 'zero_'], total: 42 +fallback op: [], total: 0 +coverage rate: 100.00% diff --git a/PyTorch/build-in/Classification/ResNetV1bV1_5/resnet.py b/PyTorch/build-in/Classification/ResNetV1bV1_5/resnet.py new file mode 100644 index 000000000..72505a582 --- /dev/null +++ b/PyTorch/build-in/Classification/ResNetV1bV1_5/resnet.py @@ -0,0 +1,280 @@ +# Copyright 2022 Dakewe Biotech Corporation. All Rights Reserved. +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +from typing import Any, List, Type, Union, Optional + +import torch +from torch import Tensor +from torch import nn + +__all__ = [ + "ResNet", + "resnet18", +] + + +class _BasicBlock(nn.Module): + expansion: int = 1 + + def __init__( + self, + in_channels: int, + out_channels: int, + stride: int, + downsample: Optional[nn.Module] = None, + groups: int = 1, + base_channels: int = 64, + ) -> None: + super(_BasicBlock, self).__init__() + self.stride = stride + self.downsample = downsample + self.groups = groups + self.base_channels = base_channels + + self.conv1 = nn.Conv2d(in_channels, out_channels, (3, 3), (stride, stride), (1, 1), bias=False) + self.bn1 = nn.BatchNorm2d(out_channels) + self.relu = nn.ReLU(True) + self.conv2 = nn.Conv2d(out_channels, out_channels, (3, 3), (1, 1), (1, 1), bias=False) + self.bn2 = nn.BatchNorm2d(out_channels) + + def forward(self, x: Tensor) -> Tensor: + identity = x + + out = self.conv1(x) + out = self.bn1(out) + out = self.relu(out) + + out = self.conv2(out) + out = self.bn2(out) + + if self.downsample is not None: + identity = self.downsample(x) + + out = torch.add(out, identity) + out = self.relu(out) + + return out + + +class _Bottleneck(nn.Module): + expansion: int = 4 + + def __init__( + self, + in_channels: int, + out_channels: int, + stride: int, + downsample: Optional[nn.Module] = None, + groups: int = 1, + base_channels: int = 64, + ) -> None: + super(_Bottleneck, self).__init__() + self.stride = stride + self.downsample = downsample + self.groups = groups + self.base_channels = base_channels + + channels = int(out_channels * (base_channels / 64.0)) * groups + + self.conv1 = nn.Conv2d(in_channels, channels, (1, 1), (1, 1), (0, 0), bias=False) + self.bn1 = nn.BatchNorm2d(channels) + self.conv2 = nn.Conv2d(channels, channels, (3, 3), 
(stride, stride), (1, 1), groups=groups, bias=False) + self.bn2 = nn.BatchNorm2d(channels) + self.conv3 = nn.Conv2d(channels, int(out_channels * self.expansion), (1, 1), (1, 1), (0, 0), bias=False) + self.bn3 = nn.BatchNorm2d(int(out_channels * self.expansion)) + self.relu = nn.ReLU(True) + + def forward(self, x: Tensor) -> Tensor: + identity = x + + out = self.conv1(x) + out = self.bn1(out) + out = self.relu(out) + + out = self.conv2(out) + out = self.bn2(out) + out = self.relu(out) + + out = self.conv3(out) + out = self.bn3(out) + + if self.downsample is not None: + identity = self.downsample(x) + + out = torch.add(out, identity) + out = self.relu(out) + + return out + + +class ResNet(nn.Module): + + def __init__( + self, + arch_cfg: List[int], + block: Type[Union[_BasicBlock, _Bottleneck]], + groups: int = 1, + channels_per_group: int = 64, + num_classes: int = 1000, + ) -> None: + super(ResNet, self).__init__() + self.in_channels = 64 + self.dilation = 1 + self.groups = groups + self.base_channels = channels_per_group + + self.conv1 = nn.Conv2d(3, self.in_channels, (7, 7), (2, 2), (3, 3), bias=False) + self.bn1 = nn.BatchNorm2d(self.in_channels) + self.relu = nn.ReLU(True) + self.maxpool = nn.MaxPool2d((3, 3), (2, 2), (1, 1)) + + self.layer1 = self._make_layer(arch_cfg[0], block, 64, 1) + self.layer2 = self._make_layer(arch_cfg[1], block, 128, 2) + self.layer3 = self._make_layer(arch_cfg[2], block, 256, 2) + self.layer4 = self._make_layer(arch_cfg[3], block, 512, 2) + + self.avgpool = nn.AdaptiveAvgPool2d((1, 1)) + + self.fc = nn.Linear(512 * block.expansion, num_classes) + + # Initialize neural network weights + self._initialize_weights() + + def _make_layer( + self, + repeat_times: int, + block: Type[Union[_BasicBlock, _Bottleneck]], + channels: int, + stride: int = 1, + ) -> nn.Sequential: + downsample = None + + if stride != 1 or self.in_channels != channels * block.expansion: + downsample = nn.Sequential( + nn.Conv2d(self.in_channels, channels * 
block.expansion, (1, 1), (stride, stride), (0, 0), bias=False), + nn.BatchNorm2d(channels * block.expansion), + ) + + layers = [ + block( + self.in_channels, + channels, + stride, + downsample, + self.groups, + self.base_channels + ) + ] + self.in_channels = channels * block.expansion + for _ in range(1, repeat_times): + layers.append( + block( + self.in_channels, + channels, + 1, + None, + self.groups, + self.base_channels, + ) + ) + + return nn.Sequential(*layers) + + def forward(self, x: Tensor) -> Tensor: + out = self._forward_impl(x) + + return out + + # Support torch.script function + def _forward_impl(self, x: Tensor) -> Tensor: + out = self.conv1(x) + out = self.bn1(out) + out = self.relu(out) + out = self.maxpool(out) + + out = self.layer1(out) + out = self.layer2(out) + out = self.layer3(out) + out = self.layer4(out) + + out = self.avgpool(out) + out = torch.flatten(out, 1) + out = self.fc(out) + + return out + + def _initialize_weights(self) -> None: + for module in self.modules(): + if isinstance(module, nn.Conv2d): + nn.init.kaiming_normal_(module.weight, mode="fan_out", nonlinearity="relu") + elif isinstance(module, (nn.BatchNorm2d, nn.GroupNorm)): + nn.init.constant_(module.weight, 1) + nn.init.constant_(module.bias, 0) + + +def resnet18(**kwargs: Any) -> ResNet: + model = ResNet([2, 2, 2, 2], _BasicBlock, **kwargs) + + return model + + +def resnet34(**kwargs: Any) -> ResNet: + model = ResNet([3, 4, 6, 3], _BasicBlock, **kwargs) + + return model + + +def resnet50(**kwargs: Any) -> ResNet: + model = ResNet([3, 4, 6, 3], _Bottleneck, **kwargs) + + return model + + +def resnet101(**kwargs: Any) -> ResNet: + model = ResNet([3, 4, 23, 3], _Bottleneck, **kwargs) + + return model + + +def resnet152(**kwargs: Any) -> ResNet: + model = ResNet([3, 8, 36, 3], _Bottleneck, **kwargs) + + return model + +def Model(num_classes = 1000, variant = "18"): + """ + 简单工厂函数,返回 ResNet 模型 + variant: '18', '34', '50', '101', '152' + kwargs: 传递给 resnet 函数的其他参数 + """ + """ + 
简单工厂函数,返回 ResNet 模型 + variant: '18', '34', '50', '101', '152' + 只传 num_classes + """ + if variant == "18": + return resnet18(num_classes=num_classes) + elif variant == "34": + return resnet34(num_classes=num_classes) + elif variant == "50": + return resnet50(num_classes=num_classes) + elif variant == "101": + return resnet101(num_classes=num_classes) + elif variant == "152": + return resnet152(num_classes=num_classes) + else: + raise ValueError(f"Unknown variant: {variant}") + +if __name__ == "__main__": + m = Model(num_classes=100) + print(m) \ No newline at end of file diff --git a/PyTorch/build-in/Classification/ResNetV1bV1_5/resnet_loss.jpg b/PyTorch/build-in/Classification/ResNetV1bV1_5/resnet_loss.jpg new file mode 100644 index 0000000000000000000000000000000000000000..65b607e3e1adcc83bd05d740eef57d22a9c68a3d GIT binary patch literal 35594 zcmeEu1z225wr&$N!9s9Pkl^m_7J|ElV1cG_4+I)WAPFu365QS03Be&~W1(?(cjg(NByJ}ahzy9^FRU1AHUj#gSCa)w9Kte(SSR$SPI20fQ zz(7MoM?=LxM@PSZAL9WQ0X7yUCKl-RBpSUI_P zd3mWBgv127McH|HxqnuIbpQT+EKDpCY-|$lClpV(|Lh0+D*zAUo<2%BG7=5o9v%`h z9umA0Kn(yOp(0xQv%&xSfpiZU1r-e)6>56tj&Cx?T9&?w#`wxhSNl3})pE58qG4t~A3kV7c z%gD;fD<~>G)6&+_)zddHw6L^#Wo-krb#-(1@br4^{q|i@@cR!RLtH#t6l6RU)F)hMkEArv&7ARRxC1Z< zq@yz`+V9iyXdV)pyNo^{qT^knKl<6VUwZcc*0I39sAvDyvH#Pr82}bC5@PU>@c|cP6uIC?Tjp+(Iuaw9YZ#WeOYgBpC za-@#XdlLM}A*Vmw1WO!pnrJ+MZ@JlOHI&1%DdOWf#dC`<$9NOE{2oNeVMXRYFV41n zXrJ9%%3ySuo0s8*Z%-q~}6QKqI!ajD^%;84O+E8~h=|9-Zw4>LfmQZO`Z^oqth{jW<|~ z|2mM^6811B;CxP!GBpsu?O8l&#*c z%FPor2kSd6G!z*0P}!H%KZH4E@O>Z%B}tGI;e zD=BKcr0_=FvxINpW5LS>*tdt^Ene+|5Yyx5qB3y63;WwHfE01)s}V=o!1A8OGkg^X zcDBXxiWHLs56DECb00P1M!_bFJRH!|N(P~PbfWGRDjSn_3e=Ym3H-=4V?O5gDo?H<3}mYheAIC zZuBGHVv0LgNvgQw%MPg#4)dz<8r0x**%K5Hd9Myn_+B+-(f%}{kJ2P7-<+7^=|Eh4 zjkTKrp(08Vp`9hp;>kjdGpzySmg$JLvkElES7{q<*b+8~&-q1@yM!zXN1tDHKF{6w zQlSn;xK`SWb^pAmrGm3P%r{dlfohHs*;;jq-BHxO)TUAaqqe-RCjP0qXDr8;Cg&lp zW2~1rcecK+UBUwIu`ml?Tyd+VG1G;wMIVj7lQcqZ=w=BQXBxTXInwLwF&tm4Bqs6@ 
z*GqJ6vq=!Vw)AtGIuZ}HK0*@JaP@vKGtL`rZ(V@;ls-l%?R%sXHIpcn_uhFrPQMz_ zCf=nVn|qC*Y7!GB#ZLTu=OUvzw~-LJb1E)Duc%eJF_73hzwLOp1OvZMlv~<@fdth= z#x*)ZXz=w}v9Q*qAsnDR;plLx%+mHqDSwMt`92zL9p)3wY*af%r?2IO(LX({Y%YK*5FGTYv8VIbhWH&V3?6p_iGet+s1jiF)i7u zNy7s&-=%j_0`YKwM51J5n5AX;w&1W}@wBK(#^q;YU?STAG&g4cc{5iwIOlza zT-O)w3?HkvTFJevCBt)sb8_ZW9ADe7ae`h&SYoeRn!P5WY9L9+ecpOh_r7{h*Pea- zHuUltdYvMjCwaHjw}ReoZUtmo!9nA zNn z8SZ;93kv_jbRxF)MXW<#mUl-~cf`wb=Nb84!6#A`%@u(nM!h8~F*NF}n_p7M%?8kr z!&c9_7^a&!te04Pa*uRw6=#|gCL5cKf_^w@2VcEuNy0J82dplRpl5E*?5C8?^f~8# zKkgyFa%?637*~$-pdQ6Z!Qm{WaJ$aBn~Xikf+Jy_uCA__0Dtu<0S9gP&`$5h1KTwJ znmrk#+VP;d@1EzAb@y0QR9Rdf%;kPnngKUd4Aq#^`pwM?jx0wr4DMuL;ui|msGBSpjS6t!r=FN zr<=EOo~Su{T_0$@kfXdN>=q#|KKJsKI(G59UB4+Ek=c z>yF70@HfvRtShC2_)(*XtldKAtJte4rt~?5?9B!2nmaYmbCuFG(MPj(thHmp`SNyd zC@kzg*7N>i3Tg=KC2I#m&l3#k7Br3Y?DGVkG zHm{J&(AkZ`0k35k*~O;#HiS!>=jkDDe2O+}tBki2B43)_C#AMOCgEK}S_?2ukm#x- zY`tFpvVu$HrE7|-=#Zcyfd4`@y0RFrS`Nidk>r)R_=`4nA9dtnKYF>+O9|tK>XVWh zWo6J0GdMst!TduViv3U$&$pQDjeOd-hA`5j`OqSn9DVWpnC&T|-tVHhIx&*|^|~_; z%Z2(2R?s6@V#jKXNW`fwJ37W@v%Q-!3;XD^jvh;oJ)9QP4n`M0%gC3bpu)Lysh~+$ zkx|7A2zy0?yQuiO(ot^~B2u>X^Sqkv!!l)Wn}!rzzfsF^#&hfI#8W zcX;d(Y@Ie^)w9;C+9>5cLdXSTXN$ z?AH^u-!kN+6liaR1A=PEo05VLuQ%;2K)$Z{xXjXWmfem5 zH?Iph1GQDyt->+FKI0uqQQWR>c+y693vRvPD^XbHzy{7VgUU_5R&s-#D^55TT_hf? 
ziXs&lC>MIIXniD`@7=}}-+Gv<*DGy`9z`C%QBe{ZtUESyD>NE!$#~p-L9V9rNJJ5R z^_#zN|38Amkl|ReCE)$8(YLn1if`B8gl(l>KALNa8AncT`BjtJ0}yuJ{*6t=XXx(E zx}AvgL9!n9BJ^W*-y3=$S`G*dD)=G7i)YlGW(6g&@6~pIrhPh~i%}3-_7@{MhYhRL zfQoMF+3X<)%Ax@eeU+Be>|IgUnmviCdPE;#_pz#y>U#$Y^7UQ@F83dFwLVN#N>5S& zI@F3Uj*Hc2Q1qlr_ZO&n+}C<-L2j3q`?92(AyVInK#oek%-{{e1+S`;y7?XhMp`O@D-!& zG7+#Tkav-AlI0D44Hn*j1D22uJ?|bgGQa?G2x5(}QViU;ZJRoy5{LIVU{C#pv47UP z`O7I9;DAfXHPpz#k8M#0&n0ym)X;=%#iH2!#VarsKT*rQTmoQ3{4Vq(%pi z6lH=JQA}@DsQ=aSe_iu`&6fYk7V6iK2dbNTavJ%lCs*2h`luyD?ZonSTN#Vup-}Il zv+k`#){K{{RP~rWy>-Z(;t8EyRR#T*k7uk(x3-bLMJkCug{Bd7sTPUmwiI&6&A`=^ z#BYEq^0&x@H1Z3k%AdlHVeZ3lz@GwD)}KNX!0ne>M}IkiD;)4!r26fcNm8Qytk>-i zunht6A%AUSK?ls%flSQYsXW|c5A1b_h9Py3{#PX-T{r?zqEqVI zdw*bENxx!U*06`e$l#H>bU0v<;1}8z>_rX>YC%U3#>`&`WA1-@!*LM}zhy-JLqn1= z9zS%!^8*~^BSri;!@+>^1%Twn;h>5s*y%Dy*5WD#fp&J&-T=s^W&pn!Isx_}> zW-vu;_V!*zV9=6lGDb1yp8}ki4H=P>kJ@hi?b~))^1xe`e4|rBE8r^t6g;h`3I~k! zg0j-@g)sgHg*j-l0%(staB!khD>@#!ZZ#;oLBi5EoiDBB5z^onVy8(rN>@>C=|rmU ziI$?9awc2zGBbn`@Q+*yj{%P6DApJ~S2`gG1YJ3oxL#lQu8soL8}ZY&Hp*HGcuJag}!X@|%WDoIcjT&F{HRGOh>X5cgCpe(r4ZNc3 zw#U#(GC zzO*37Krk+TT2|q!2x#dcO@6i2RyEhCv89G!W+(^-N0*D z0p8U5I2+_y#RQvsZ^m)VynL%o*S8GMSSL*Kf=%rLEPxO#fa=H3&dE=lQpb6vby8|R z@mS<`E|{o%^(HoJwpNI2j=2n^gCTA=MScI-KPo%&*bdZ*i#|E*H} znEU3gI|~RG^RD$r@x0Zm%IFEHc+qHQ%wM&!wJL|^k~+U5{dVzx%+d%b{u z7bc3e1{U-_V$w-^KJH9H{(kQ3)#bLplbeZ!%ct8c!}os-prufy5m{;n(meOV>2R?` zuBfPY5~T6x+q8*je(otfbWw6{@~S=OAxHxW8sRE4x+q>(H0={ko zAjGS!)AIsTo9PjMXrqs}xn4_Ch??cTMY8#Z34^(v+&^E#^D!+asE(gFvhyv(J|@3|-XaM)iu zIXQ+KN#t4JYZ`x%nf8W}8enNoJO|Ns{83Z9zC~RPvwvD6rnD-0!_$o6*jeKGe4qNU=3)gf$c&S(WJQNGZ^CDKW&b0T?+NI-eT=_j*XhZ?%e| zsF;L3ik9QYb7Pc6Fq3TT%|bYhtD~P_q+?a_)RCU;GWXmvkiF6Mem7OCcdkctkCrX@ zs4Q8wq?Wr$zX;9o)5F<^t|5S}%~B6A5nFC*fi#=*qw!A_1OGV6@eha{{ zz6Dy%>CRMJHGZFkyXDOX&eZBg9`g;7hS101oIAIh?u++-p14 zoE&Vs^-+SIzyUn^^GI+&jOS9>4Js78nUjta@*l3B|IylN7el8}9V49inDu)P0BJnd zV8-zyFWW1Rx5|^%J6qA7)gE<9EX*6EPn1^ZTn{A4+;rF!^dP(+##Gcasw)*lG)k@Y 
zJE$e~!zSuiBWz;yQ(g!l#f`6WieZRtBB%bv>wZJpLtC(2JsDcsV^E*sNyy0>!wc}R zT7ChaiUTO=e6-rU@`bUhIGasP_Mnd58{e8bveo4>>9{a<5h>Vbo^Wy6_y(i* zQjX5CWhLVidyF?fY+wkNWeCQJE!aMOSVa?=g(IZP8i#VCs6%Rq;MkVrW>- zPV63AqE#v}i?$$04 z3}ZEdT;D2ktwEX+6OcIwvVWEBob1A-B3 zn2b=61z7H*)U?we=CbfZct5Ue+tXVRl2<*xrnIKq@IoM1GL)31#>ndl!Q$SA=#<>= zwZi}%Q{bJbEE}<6pgqqGJDZg|UEI8yQ0$(#kG!4(5=t_^suLQT3wi0KNT;W;!Us1M zc3)0?TZocNm2q>R{+B}&{-DQ!Uze()aAiUcM6V<^W=sdc-I3yISLX>Qrbp>Gm|d+X z9JH;q`XxEzm{rr9(s=`tIjTx?$$|F|!t`Y9mpO1Z`E_S>bCxM-=Fi74 zmJ@|IEaY_`h;X4VL>r~PNjur&SZ`a=sKDw{4S^Ll>W5Iy|rP;V&uJ_L$+lsEZ%@(erZX*yuIm?qEy*J!N9Y zrLSLiOw_-81sL`MHjzw^a(;D%0n2P}o)orcEf@}1WX-m_I#Wb%=@Dxw**k>Pi_lrJ zlGKvF16V{FZoZ+9c8TrbfXw*XW%-C7Z~aO3&5^cZRLuYMOMhYJzcA#& zV#_5bp5Tc!_8~KdST8E-9+T$%jj{ScD&QNoWo&_t4Z}zcNRmp7v{eLM%-|CY!qCUa zW+;)Lt!WF^o4%Mwc%?E~7|^;iZ>~HESPT>i$+BL$BUTIaVyzCvv~eIP9__IEP>I}* z1!$*8tw!p-49aOfI(vVvGns}xJ!T0OzZ2c0`o z4>Ir5-YmT(jE?)F2(YJn)QtNnWTtuKG+cg$9JI&Wy^R6DlM`zc?uigrmq(WxE0rT~ zl8u%PrLehI`1q^3FhE2dop?p)41(RP37qlXN}IX%V<$TMK7C`Xf4A@n-59s+VEEbI z&Tss+5hOVR^*=3ylwF&xcI>F->GDO;2{)wW}TyV8wVpSmRSM-_RRR zk{emoWk#QI(2l7GS+zoYO zfm!+`C7Jdb16*qxw^subxG~rzC7dKgBFh~&LiTy0 zqjiXdq*QGOVW8{AV{==s*movlw`phA>w$kf=|9;kTHAH)HQB~3G8{m6&|!Lxi7>iR zWfR2PBFBtU@SnIMe}%H}ztsq&A+H6PB>%!WA*AiJl5F!onQw$L>JxjX7adcg zsHY8>?__2fE|L~%=m(oqxu!NugkQ_2kp2+c+B%?+c(DgFsxL3Efv{#(RRx&`as_GJ z4i{YH?WVT=;Gn;5g(>H0`j+hN8l8YM_tvjZNppemiE{5eJTx$t)CMD6)Liu#GYe|x zrel;0Pzf{?&3+m>ir!V=uyBk%OC;`NkohOsR}mu%>I;b`0`J{ zz7ZkF=`P3^BQ(ALHs_ax@T^1ZD5B2-q>tlJnk^U(*hbn12~C8trXE-*YJJ+D9B?rj zbP(i-ADWjCf9c9kj`_fo6@i68Ww-P*B_ojLUf$q0CNE+}nQJZ|3Q) zq-D}9=H`?~wakZfp?C`mq+%bjMc<^ocG4`sqS&D!=pge-Brr35ellGkTyT* zrCe4{!U3k2Z-E(f{P+fPIW-#z`Ls&NO6%|Fy0aZ0+aohz)+ghKY&DbmQe`ZOFlAPS z3~GJ%Z@=G9t3WFcRl9J>jH=5OjsEDYU@Z6ZH zb$y#}NTLRXQa2s@?*v@jU2U1P{9z7Jq1oiGCyIE{rRqW{Y)v9~{LxeZ=DB6go+2-3 z*-E1wW9cARq|Q^Mn__UlolYbczSpGvypf@og8*wqKt-96P~R!Sev?n#h%R0T)GIfQ z`}p4DpF^!!D*do|02@X~@Mlw&A1X348MQTn)HwmCg?TDVQ_TnBcP$cSoguN;9ixVn 
zO$c|-)IVB|0*_9ECL2L)^1lK+?xo2Q(*H+TM-dspt8`F2lm(*6o{auNOuJ66Xcp;* zyJ13=;Fp*i2lA!jAAZy-cAp~h>yh>wHO#&o*3pdDZ0kxol6``Lu$nK6TR17s`Ccx@ zZ-Gw|p%Pul$&tOOel@(sSBvq5V>hSZ7)qnO`*m5(etUZBLzN+ng%5vs8vKQ#{NJOl zso#TGQ2Ot|@n24_X&UN6_9sEWDTGWTzZ1iq{W2^Wi22tPA%q_VOG zVwX`_8SuC^NLqmm3-w;Hh9t#CI2^EwjBpstOhn)SP4a4McKrb2;_N~zq7B6E$ z`EL-3*p7t^aQiJ_{@#0TM@}h*xJVu#H1$glVW+L9W=?MO@~QEF-J7J5w+FIDz?JD^ z@tQeZu1J*-C0?|kNs;`y!=5$Y1ux_s_1vw=FzAK5iucSyfy09wQYj7}w>u6*#A!;w zz`_9UEiK^Z`rRVj5Q%22f?YEI-UC|dR`4dh=Q2|?R?xC74KuLVo%*w>d(p~!^pGNC#9kC<663- zB^(fv)W@ATgR_3u7B?{3tJZ@`_e7V12e3!!^Vw3#VfDtNz)(-pm1)% zQXkI3Q!QtkyvqzfC-L`JiSv_=xXv?QlE}ljkA@Usjvo3u(tU) zbifQb+{Du(Px=Zv>I-9Y7rLysM*UB!u*~NeU zQ9OvM4lT~bZhjj$zh)No7~Nb}38krWg*F{MgT;j7T@A*W8pf7@^IXpSjpD`k%KHL> z>f~&(vGosqHu!9PY36qneTA2@<6g`{tB2n*p?bvAMYb#m?q~rgIkyt+OWu(;7um$q zv4urt0g^qJ{e`GWw-X|)ulN=9$leq|TjQiVtul5}Jf;6hM0<;tY#haE5#^9Ma?Xd(IPVRHgc< z;=)P`yK)-6VWtJuc3ZsjgTO0TFiZO3MMS0-OqtT^86rFT!7Pr{`K;gt-IR&`3w8*# z*5TEZnbwER}wz1TeKzSaXD+ zPnFYE+0^w3$EY0P$MV0$Ed6`b@prL=KSn8qeoJ#)Q-4GK|LjGCg+h&2oCV|={hgSo zeMQp?D=!&0kFUbuI%gTUKIu_sAn?5W3oaVxoZ7Q*EuXZLYElOCK@0?C?iSJw5;)+_ zq|U@r{{>d|1^X)MU4eOxQ;EsO{`FgvJT>}ivhLKjBQA*1U>NR|O~?s?dcDuP}T@3*tmPA<%IuOO=5CwM1D5MvnpdkFrI z_8B59F;O=%C9z0-#o$MVh=rQe!9ol-sqc(@v0Y(*v)JArj2g$a8xA=BDWjK{roaI? 
zQBw-W|DF{8`P+GO3V+Du11q6}Pr`9X>R}WV8>dCpVHx8^Sb>~!&zJLviL00&ImN`x zVFl`M1ki^_(Qx)UFU)5bx8XvK4YtfDmmYl1c9&KZNf1ImD|wd4?V6iI|7ujnJWGJ2 zDJvQxbvPADH|7nTN!{8LqmA-Cz4p8y(vSCJYi>H?_4qb4yG2YKt8X;Rn%zIvMJ`fdZ1GBCs???REN2CC0&>gY z?o3B?(%wp!;$X4x@ME@4^cSBX>F<7V1DlO$DP>!a42-rcB;)tj2c8FGzAPbAz$Z$f zps;@>Z;TYioqQrm^-)fD+#=PUd!OYMi_>9aXM1f_mnx2rtFhiY-4mIK-O~~m^0V>M z;*Sx_Dz8A();8x?X{_(@+8!Tr1xoTFEoc}Ihn5u6RQsxFAaarPm!&A2_2-)9R_XR0 z6a?-o0g$615j8nW?FEt8z%0YZ+u&DfHF%L{pGk)Vhf4(q0qVaAPUEeJ`3))BpVqur zCq~wTbe}^u9v*q(A@Z5DtYKoqsx{ExiG%#cLHL#K`^R_*ngH}^rJ zHVCOwHWhKdsk<3`FBeTpDZ_&G&>PRV?k#08PS(VGVcVwx~6sIavf|vsAQSBfbzT+{l<&!T-m_;ve}H0g%5l_1$oP z&rU|r<$h-j{x}x@w^uak&@+(~>TgqZlR-q<)2)^tOw4uNb)(9hLX`1fgRYG)a@fj3 z+(OC0gk8%dWkja@JQS!Uaq8u9&zboPUKYGf-Y3~IQLQNfTVg1j>*n}9olC2bTHoy! zzn~=@rj{|zY2JFh9FFAHhG}c4&1EvcH(RM+j8}P=YDFHBs=99c%3q}EvmXS+OJx`R z`a|Ek?6g~NxAr-0&CB*BjF@%hrV>+~mKOdoBN5uNyN~?1Y`N|n)<|-$$5fb-9Sb!JN+e8oN z6fP`{S4J+V1B&4QLg0Jd!->60^d;0yB7OVx_Z43Po*0OfgoDYsY))WHU*g!p|CCAHnkoRPu zm*U!uPB~&zuH!iFOD74k?H%t5jilOcg6uU^Uxa*8Jb3RPTtoS1)=hq8lBkqw&qKB9 zFm^5Ca5i&Ho5Q!64i}&`J6kXi`R6Di98P`jNYig6lYUvbn0+Os7Ev^ctN+79;0E_X=^XuO zW8?P;*1cxA^w}BKk92w%I&@l5ByJJ`)Hynb5qVE@(q-AXIoR}}#!TCkf*aeMV6t6% z)tH#AD>uOyr^4myos(?`go_dV5AS09v%40**0cH`tPGGE^7?3nX?k(HH+S7)dQ@>S z63cQc%i?>fHB`RwZg2qJRjiTWUiB22cJSN}>kl6-X1wDzsLtY+1*we=0~d^#l$)ID zqMZ91j}_LUue8l5fa7k$d{=Fw)5V5y&urVY;Q;#Uq7*SgHKlRdiRvVy%huha_dOZ= z63Ee)I{l-$DPQjAmIj6}W^*D~*?(Ey)L&Wm-+?Xugmm3;!zH(Y+UnsMS$6ZUCGFsy`(;KaIwegrw1AFBtupg5~6?m{OFdr&Xdae7D z7Ma-2Bp|kJl)KHTRQE2H6UPiwFwr)NRMIT9BguC&gry1*W-L*@STR0qNx7TJoa4XD zJ?}CMy9iwa6RYm}sd@#!P7|t5wXZ(5J^SJl`K09H@EgK%0!+`4FTf zJ-Rr;t8BtPH?Oz{p|X6x?m-uCv9H8{NX)Y6_P$Gwf^LCu(%I3<^1}D>hMp|%)55BA zhi`pSQ$Tg4|K7p+>*DQJ?O@KSQn#Cwrsw8HE@<2>7~)qY&vNn=u$1r8?Gs<`X@r@n z4fd{A7Ux(oHW|!ayTh6i$DEr&^;*-!)21wtzqK7PO`MtH)ZNEQ1J+QV_FV^@iu&XDYsbzyghFAr#arUBEeMqaJny27^ZR6MwPW4bHlw>Q6XS0)FFNtG7V@D}!1-+|kO zfO+W5nsa`plW+4gLwd)5ZKwZe$NvMg=3n!tpT`!;_PLu!stmLCj#dvwx%b1BDDIGo 
z7{&8DD`%$Qlp_mApT{MUOFzxFiHf4kl3Zw?qgqb3%H&_SzZk@RS;>Lw}nMRA=d zCxw;CTHD}?^9jg|ifEb`snJWo0pPeM$g7A64q$I2SC4Iae*!G$=|*?F zsOTor6f)9Zf(}vvUSEHqqM8~id|KV|!N|5Y&%O&GWsD?Ds9o)p}SMIz~4A9Z|0ov03AH z1VF$h0%HGcQT7)UPLD#OTf{l*FuIJ@9)IJQRnxpIcJGQ^7o)l6>7so474$=|rdmI` zI?vFOKhAo{>aAn(3!v!_ChXl>-6_4NETN$`C@1;TU9!$m&`JeC-N&vG%V6Z~Devwe zwywF}hWnnZ>O-R+R%7@C(PXKeuh*P#RneR!aekwr;IF9R*~GLvX7?29i)Zusim7DI zKKX>_iAVd&^()G0>CB%mGyYB%?>!_E(U9*Dd-E~UJ@N6Jr*WzC`0X0w0oyF9m1l$E zMo>hCap-`@BT3&Ljmt^GxPNv zloK`9hBbWym^Zn#phZ-Y(PQf|=B1!*@9>Srt)Z9rd^ECQi#c)w<~-Rhs;7H2j1o$x z$!O^=ghF+?@rxoudyiDHmv*dyjUo-pD?$ZLUgF2amn1b-7d*u6(qLJ)Dvq~`S7lw` zueV_mK^*&S?QlT*?NBSi^~C=659WIQom|a`P#SLGU|o{Or!)=thtLjZulLU;?5azI zA-h=n<{_gBpEtdKZ+2wl>SZO z4bphxcxJ;qC8&R<&2dNhOh9KoxlhCsR#PX#uTf~QHHsnH1dWqu;igRL5fj0gh%T0$ zryJdf+=WbU6UnNf#Dt+lS%wKXImx;6PW@pjVsS@DDdYe#hLXoK*c!#ai`UUY(%BT& zl-G#$8!&LVUfH-GPWZzlHZlg0cG~cGTO$9Quf%vLV`(5%L13OL&VD}uUHo$;q9EL)QU zXEj6LLW&d0%iXfsNf^0XGm zhqAQKQiIP3b*D5dTIzUa=Gu8>in9F!&Q1$GQyE8!r3A=I@@Wf;1E9yoi8=QLEZ=Pq zl>R7@b7?zFqVF3@$lOjCy@-1<^Ueo<^3&z|3{uh_wSJYxMXHfjwa+S9o0(Qc+9cD3 za3XVRTH&aBXJZE6@ZRf)tfQj1Boz@JZODIrVEuoRAOCy)0SkU76_tt^ZGGLR znQad^fUjokOl#9u`&xI7P$M?9@u`NUu63FT$S}Bka)xEgk@~}vPo#9{cFv65R@j@q z64w(D8)31CXv<0T{+Mm4^*IOoy(bI>!*sfnXs!XNUyAP(H5+Q22=pa7vK-tU_4iMW->vx1i zjunBwq2?;V0n~blDN}bIO$=A&jb5{TkA!rkzfzWr`;fW27oM?Y3C=z6oMd1@>|Kl7 zOAwa$aIojUMTl^ieE{AX=Vhe8g`HwnKE<9Rqh50tu@2`eJsmj-yM@kJ;tJ&@d$39B zdsb0bD+lTANwVP2bC_yKOqZNPVCGF$K5{YDX=4N99#Pp97mCGeY8hP&ER2^J2(Qsv z{jjRtVaSieGTI@D;B6X6xFZ>dCU7U%X-imFpvH94b^osd={C^9pI8T0S$Qv6SJ z75;-HD?G81<`~vUl;_(nBcbl4hYEPum&iHiB6F0r-LYwgwPvitKI-j1dk-obnPgAS7-aTd$bt?Z+=PEx5s}Z0 z->y5p2_4r32E#g3@MhYe*19`Rm-N0oJIZFhzRn^Fp1FMam{BkrCsl7}6}Y0%+J_Z+7XGCCp0{Du6ky>RaIw+WSg}iwNEQ3in!I65=NNDx zNmi>vM}^&Fz?10EM0#XahGJA;UWtHHBB0d46txG)qLyw;EUewRnn`(BaUo;GWt+L8 z7Qauf96t?}RiT=4NI_q@*7opjo+4I8nX#H{coXNTt{0H1W;-TW-<3n$HV2b;{U(&V zT8qGy@4aMYA=PaO>Z6(C=B8JUW=}~XFL3GZm_cC5O&6s#6J@@4cgxK|S+v{Uel>MO 
zSyrM4D4rU;`CSGA*k2=>wX2{~?$im+D6R|_mXaOEe9g9FskJw0w?gj5#FM9noE4=V z#(ROyz1|HIt4A+;UO%a!PGV){m=ntH--5dv8HnB^qkNh7)6X+ z_`wTw3;(APviMZ-frTz?( zw?w1>QPAb&aC5f1Z`0sgapwsblqN$nC+^4m)+j}Y1-d_4doWk&j*L%q( zNNL$hB}Dd1u!`zZMN|_ZK?Rc6Rf4|)3K?MZC^7_be?bai_hPmO^R|z;Wlyqt^9#9q z%-`(4RwsN-cLvan46fmysCIAkfhzaR_l_BUJIsH)SfD^3_ufgx%GQvo&TodO99#vi zb8DctAle{0W)0E!#E;3;4N0bwB55zd#oD|u)Fyj3bu2xV7_>6~yxe*qEKcV^_F^(K zjv}d$AOVtT_pU^FNw7S)#%-Sm4p57lQZiNx7Zu5rLv1agihP+EJ#l(20CEV_RGF1b zxg35RJ=tXYm-(!JsjrLpU&iaqt*I*kqUfr$6^)uUu0i_!FZ8`1q`=O$6{WMX-gk&C zfPEvm2e~(=x8XY8Av1d?x8FBQIP8jF={wBqFD7?8s3Ai`tb&U~FC;9R%M)i-P&N+a zzk2i8YAF8@tm$K+hk!37KHL??yg6#}Mevk>OHzE>M8eq;?C+BiM-`e38tn%cLC<;F z2)|g8=>#}r1T}(1bqb2@*VL?q+G~UZ zxCG5sCwE$hZ$k^nWCLjB6f;lhmKD#BaH*9$WJdyL=_nr=kP`Db2>M+d>aW~HQ?OgiFvta8IsXGJr zJgQ;MLthL`WLVHNxiN%LG^bF_K+7CB%`y7=ir3~*Mr!fSw2#G}KNk~|_)?-wO(vJW z&0^(fBRaQaVws|mkm{$^*OMb#C`HdWm$!Owq6=9!1`gJd8t|=`9D9y$+Y}E#2Q@Kc zY8wq_y8_KnBldy=`X)atAi{+v~zRqIjnGsOAMgCZcH@_R8nnmENHBHkM5r z4TDMHqWM{pB-NI>Z?Ua5=VcH+lUqsi=y(+dt*>+r`rNo}hHCgqa!M`YR;+xwSN3JS zKQgNhYLeLaR*dRwZrh_g9-&VW@|9*;`yi`13X48-8XQ5soG2WhZ6Judaep$JpEb0+ zH(MhI2;|0*Bh2{x|C~ywF(}Hv)e6+4gPoKkpB0|Zq1F=y$!+X)4kE)^EzbzAczWI@6P^%_>K#@I}dJ zd~|8M>$tw@nwYUrrD&S%t*LK1xs@O{BSZ2^dA?oE4z~QN6o|Izq3Ux>-0zU{)6%T( zrRU=>zLg|5n8aU`x#^0wKGy!StRkQp7abB4Wl7<)Ui@O47$E}IL2?_o*%dX3tyOVs z{i;-QA<(|a?K6C(lH1V?IcuoN!Z1)Y1~@Op}YL~ub1~LeKKABlrx-#`*3pT*_$>Y{;zSuO+Z_d`Bq=w0kCPL&i zaFu%gSp^!F&?c_xxR15RC#uxMYg?*5o7wo*M(>|Pd`_fSJGAFsAU0z47430=S{cEB z7>%+~T&jxd`J%skAKex1l5~N9qbry%UD!$Q;{4b<(M?0n&itpw{y!oc?{{RfznUt4 zpLipYv!fJxBe?cbFbcc0O`c*hg{#As+t{=_e@SqtBNZ1ArNB~0dyCr@& zE?@geSMx5A*v!O!kQ@F{+Q`>3ay(0|>cy}L^3LBD=1(e8sD$;Cc5`BueHVY9f6Ma1 zYhcBc*J^hSH(=`rVsqE}i`=GS8y`MwJ2B zLQ8KGq#6=p*a=XA@brL=U~-hzC|m3akh zIFf@|iQ{R)wq4bIS66=Mi3%*ZWxdetfc|N9L&_ZYpgqyraGs%?@TWSWN3&gdv&A2r z%D*>}Uc24G0nZRAp6}8O)XrKTwW8mB)Y(M5ZQYhg1Y_Yf7>Kp#y1;&Ef@;XEsUd*D`KB)5{{5be z9YX@mbG-zf*gNz+dnz5m>o5G1jpG7Lh|G*xN!;Pj9u-v6IgNFbYYpJ4d8ibcpW2** 
zozF(dL}uRh(b`(m&B8{*i+SVONmG~GTAq#ahK9;uqVZP|+Be7F?j_ucXjtS9Sgj-O z-283ZcPHos>d~>yA{|m@hU4JSTAXGt{^CXIhwczczj;e+ccATpe)=_Ohaw}C}4e~g~C94TuLK=+vqP7F(mg_eCb1?i>43+ArG*1Kn`+1X`cVkq@Z%?-7I_LTOyN<0a19{|4SE2`C@I{YW0 z1EJ0=r8G9`OE|cqXM1`RIuB_Wb&vr_{!F%UPuB*SBt}#VmE*Zg3!>4|is?HY`RmrZ zi!l|>bxb-Sv^nA@G9lMn>#6%E78VyQ><^tI;tJ$AinP|&JVAqXR~2{Wc>>&(eo7pD z1l@5~=ZwYks4*B38kSe(ug|U%Q^{@ErtBW}JUo@dye%|jkyFK+pcT+`8LfyUaIx@a zv52m>Cg@|cU-vzRb{>Q|SfY%N>j>sG(kqpG9uEi=)c?f-Ns z?f2xhzx1=<<*}JPk6CCJ_qAfc`yB%Tah$`wvFNAKwhaf5UR%QflwA%QI;8y~BKVB* zrjHu>B5`*TxrTy@u10X;y@O38PjQ5(pMlR8LfF772OXH)7d}Qg5|>-+jfu>6B`~_Q zvsLR|`@`C{GM;!-DqwlJ$uo#L#3=7tr3&d1FERQSy!7FEI|MuzEOQ&*D6ihH! z?Q@h2@pXNc+J*K5p#m@q!p~krXVW=Pn1cgQ*4viE#t@lbks>C~E+D(-#jaP&)zzkW z20O28vXn~pmjvS{mpl|xFHcLnzfIi=HzxJ?S_w>_f&9!GUcdpnb%lsK1err}*w`G! zIw$mZe69?;_XzvCmVIB%(?`Go9mEI1Z~j+%*BRAhx~(%*ML>!)sZJ~QPkQNXKozOzK-`pQ}T|INp%sThX zth3Jb?_O)~^?mPpzrEjYzt8h*6#2cPg5U@~FQG&Ip=V5`hS8x+`bv5yQ&I4-Ub$P4 zSZV}$YyVx~g7=n%b*0qtEKc1Z_jMJhjd{c_5xHo84zpDgVH_lY7#B6kR2bg}X_s7z zOs~*GJTN4spRzieN{}7elu3zylj;)krEwfvwvAU6zI^%1FA8D7Z?r=2M4)|{Y9n(= zuXWIah$dTNy1?3Q!R9dFxQ&XPBT-!M-#=S9LUP2qAA!CgW%v(@5GVJD)o$CykQK04 z4jK84n<8j&MX(J zZlKf19AfUY(4Ll~QWCjz-S);J5(T4jp1TFCc0a4D{C>qn;#tbYJck$|p>gh@C#3X) zx3C(j&LEjf{Y+3`uw4@^GP`VMJ@v8UL0toYx(oow2kMEtMXgICHWd`{>(`g_M^i}7 zM4$hqdfR_>L$1#yY~LB@rff);ToZl zdIY-jp6VUiVpX8MU&`e2_;8Io;TB{v5qQ`m5QC4Bl?)&P$rF=#XcZ};y-ENk1sG~q zQUJ2LUn?7wdblAx2Q5$a*(u#Td0(i1C3N;z4|4zMrU~p8@(48fDq*95bTZC%gVVq6 zN842)c+5wK`wyMs2EM=wf%8H>=CA{RC0puc0YJ#V=t4NV_Sy7S=xNvjH(Ag6E=Lhz z@1@0y3(T0PjvgJyuFhuF*@!UKwAid-mvZ>N*FHOUdZ1u3Wm;Nm6yOH4T;<;QJ0*ssk}FumPfae-S`>kQL`K)hV#etc(o;GEuyj>&$@EY zG~4snJ~t452D<5WQ2(0p{1FUiq34;cHR$!lR$IknN9hDW8!&`o8?Um*>RB@!mke zQ$O_B!&9k0QoC{U?-~Y5Npz90mYW$;D&5I;dg}G(j^P#soU19h8w#zRwD@j;1wTr+S6>m4;srxXt2naK-sS>8`?xp9{y^?V>-( zhUkoTxZr?%IB5vf2RlIBp3X0m;!>v$s2Kl^HDT9yt8{OBM0*Vya0F^E%E=B>>;KwT zpgpC6g;MI4GIzoCfJCPS){H#`i~}(Hj0tF?BL5Yc>EV0f7AFOg7OLY=z+k{D)*^^) z35xi>6=nawoeHv@owMQUAt?g!k~T1?JF1mF{H)1Ob5;xg*i!*X>!0<4|MsB@#n!3O 
zBXaKLuHxRhelhUs^c3sF+yp5Rj*^H;Mr>Vd7VGoGrJcaCq_|Y&fid3omKeMqAW5u5oF(0 zv)r%{F%ZS&5agA)mj1&&&{R4W!O@0c4h@PFj+f4zMC*JeN^f-*K%o;!`G ze*}8{UsbGt!cJF3pjq4EVK+F1ajyCz>Uqvmj~yuz(J3;#{g2I{_^qCfmDWtqL5W7o z+b&0i@M-7QRW@OCABa6kFZfnyleO0ESgsGD-`Tku)eM%JG}JFuhL@YEbpjK8*p6w% z05ue9u;n0NoRxP-nux>0>rDeZ2J6kF)A?E3GUP>OROzoNq%AG+L)XUDjpsgW5(tYt zgYD%PdY6h+<2@}HhD;a?v=j|y(?gWzm7|-u6vOW^@@Jt{1KA zt934lce-9tgO2H7g6+wi=D2o4I;l|rp{T(eNQ51Mn4w7M%?+2d$iS!=$EPMwa%^I5 z(lYQXvw0{g&xo+cna~Lxudl1ctdy+E3K5M?9HgIlATgN&PK=Tcsv4&WO0p9(!E8OX zJ9!n+(xVoC;=E>j?X2(Zp5>tO+Xj&Bkl%;x78ER6)oM{!*$t=o;-eEB1HY7`WM-Ztmu)U~ELr2L!;3Gtj@1%2sCbQ8>(vrc z-U&fG^8Lva(T7dvB+ZkWQSt0|bg!5c=KP@Zo+ga@5ByHEs&uHR7zAbEb#^vBXkzz< zZU3X4|L39W;iV}p1tQfw4Nxrv3nH(;9*)13yq~fqzGIixcP~J{t}2Sd;EJ4t-L5YG zBTW_Nz!9$!{}7gYK^mEr6QlN{sT;B-f|rpVw6c6Q`l#_eFAj8DM2yv`)+8i1KiYUj!> zHmj+E(-vLjt_+lH>7z7jS7hB~^EOekC~Ku>5Snlg5QrWSZv%Ok82&C6K`LQ4iVg!` zTYRw}b%-9`@r^0C?}^EIT>Av(al?Jj8sSfT$JD&863x>_#*7&kxhR`}i_pH&B7!MH zxK|4u(I$Edt7)KaoJeLHM717!7gxHxyzISjPq(55ZZnrM%im?MZ_4{#^KM3jHC{On z>0eu~mOtsjP4rP zb)D|kyt0Bn0*Mojt4h9(%Mfzj5n>(0k6evy2`W)ERs_=*lijnhilDn1LlLx<2A!H= zfdjM;IfH!svW&UrTE~0u1baD>A0J?g%_^i+8VAm=uz34Gp;$L>Em=WGkM=X6nL54k zz8<6V3M(I;i0-E~{NG4xe+N|drwPzLph~aLhH%xtIIgrOOJgh63Ge3cy9yqQ((5qu zA|a;VF@1jMJXHd-9=h*TUS-3M&W%;XO0K53TiRu0#6>L)62jEa)!XHajgH;umBF6K zCR`P757i=PC~Q;(ycF=N7DzBPu367~OmZuh6~)m4pl9V@%zrwQ?yf@5l)J)rP}6U( zdZBMAO|PKP)mKP_+Bp;+oJ0~YtHKQrPlKC08yDt|EqZvW9D%&II}5fZS&Ur^cW+O# zNV*yY={3HXl6c42#n~OlG56$l#{*RfKFtA!xsEs`BZHY};oa@+RsVBN;H8+|6Abmv1D**K172J8SB? 
z3ckq-Ag8ov3@nBd@T!!`kD9ldoyLV%Y4X8|^ghbuv=^^tx(_ho|c}Yx^{CgS#C%)fHv+ z0JERvvPUI+CO{L*U>NrL)i^t%cBYi;BRr71VQS2Q#UWm-)amV@SXtd$g#WO9x4Np& zy_=6$Zc--N&&R&a-F2|2YFZ;r^rCO&;N(_ONup*|guqSi&wrxOE z-<4C^qkQtK;4n=}dH%Ub3S=JfDa?iBMdclI-&#swyMVK|op7nRadDHj?!3TFp_2kd b9s#KE->+VO#_jp{nBUj^FWLjzN2C7 支持 DDP / 单卡,AMP,resume,日志,checkpoint) +保存为 train_template_localmodel.py +""" +import torch +import torch.nn as nn +import torch.optim as optim +import torch.backends.cudnn as cudnn +import torchvision.transforms as transforms +import torchvision.datasets as datasets +import torchvision.models as tv_models + +import torch.distributed as dist +from torch.nn.parallel import DistributedDataParallel as DDP +from torch.utils.data import DataLoader +from torch.utils.data.distributed import DistributedSampler + +from torch.sdaa import amp +# from torch.cuda import amp + + +# ---------------------------- +# Helper utilities (self-contained) +# ---------------------------- +class AverageMeter(object): + def __init__(self, name='Meter', fmt=':.4f'): + self.name = name + self.fmt = fmt + self.reset() + def reset(self): + self.val = 0 + self.avg = 0 + self.sum = 0 + self.count = 0 + def update(self, val, n=1): + self.val = val + self.sum += val * n + self.count += n + self.avg = self.sum / max(1, self.count) + def __str__(self): + fmtstr = '{name} {val' + self.fmt + '} (avg {avg' + self.fmt + '})' + return fmtstr.format(name=self.name, val=self.val, avg=self.avg) + +def accuracy(output, target, topk=(1,)): + """Computes the precision@k for the specified values of k + 返回一个 list,每个元素是 tensor(百分比形式) + """ + with torch.no_grad(): + maxk = max(topk) + batch_size = target.size(0) + + # output: (N, C) -> pred: (maxk, N) + _, pred = output.topk(maxk, 1, True, True) + pred = pred.t() # (maxk, N) + correct = pred.eq(target.view(1, -1).expand_as(pred)) # (maxk, N) bool + + res = [] + for k in topk: + # 把前 k 行展平后求和(返回 0-dim tensor),随后换算为百分比 + correct_k = correct[:k].reshape(-1).float().sum() # 注意:不传 keepdim 
+ # 乘以 100.0 / batch_size,保持返回 tensor(和之前代码兼容) + res.append(correct_k.mul_(100.0 / batch_size)) + return res + +def save_checkpoint(state, is_best, save_dir, filename='checkpoint.pth'): + save_path = os.path.join(save_dir, filename) + torch.save(state, save_path) + if is_best: + best_path = os.path.join(save_dir, 'model_best.pth') + torch.save(state, best_path) + +def set_seed(seed, deterministic=False): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + torch.cuda.manual_seed_all(seed) + if deterministic: + cudnn.deterministic = True + cudnn.benchmark = False + else: + cudnn.deterministic = False + cudnn.benchmark = True + +# ---------------------------- +# Argument parser +# ---------------------------- +def parse_args(): + parser = argparse.ArgumentParser(description='Generic PyTorch training template (DDP/AMP) with LocalModel priority') + parser.add_argument('--name', default='run', type=str, help='experiment name (log/checkpoints dir)') + parser.add_argument('--seed', default=42, type=int, help='random seed') + parser.add_argument('--arch', default='None', type=str, help='model name') + parser.add_argument('--deterministic', action='store_true', help='set cudnn deterministic (may be slower)') + parser.add_argument('--dataset', default='cifar10', choices=['cifar10','cifar100','imagenet','custom'], help='which dataset') + parser.add_argument('--datapath', default='./data', type=str, help='dataset root / imagenet root / custom root') + parser.add_argument('--imagenet_dir', default='./imagenet', type=str, help='if dataset=imagenet, path to imagenet root') + parser.add_argument('--custom_eval_dir', default=None, help='if dataset=custom, provide val dir') + parser.add_argument('--num_workers', default=4, type=int, help='dataloader workers per process') + parser.add_argument('--epochs', default=200, type=int) + parser.add_argument('--steps', default=0, type=int, help='max steps to run (if >0, training will stop when global_step reaches this).') + 
parser.add_argument('--batch_size', default=128, type=int) + parser.add_argument('--model_name', default='resnet18', help='torchvision model name or python path e.g. mypkg.mymodule.Model (used if no local Model)') + parser.add_argument('--num_classes', default=None, type=int, help='override num classes (auto-detect for common sets)') + parser.add_argument('--pretrained', action='store_true', help='use torchvision pretrained weights when available') + parser.add_argument('--optimizer', default='sgd', choices=['sgd','adam','adamw'], help='optimizer') + parser.add_argument('--lr', '--learning_rate', default=0.1, type=float) + parser.add_argument('--momentum', default=0.9, type=float) + parser.add_argument('--weight_decay', default=5e-4, type=float) + parser.add_argument('--nesterov', action='store_true') + parser.add_argument('--scheduler', default='multistep', choices=['multistep','step','cosine','none'], help='lr scheduler') + parser.add_argument('--milestones', default='100,150', type=str, help='milestones for multistep (comma sep)') + parser.add_argument('--step_size', default=30, type=int, help='step size for StepLR or cosine max epochs') + parser.add_argument('--gamma', default=0.1, type=float) + parser.add_argument('--scheduler_step_per_batch', action='store_true', help='call scheduler.step() per batch (for some schedulers)') + parser.add_argument('--resume', default='', type=str, help='path to checkpoint to resume from') + parser.add_argument('--start_epoch', default=0, type=int) + parser.add_argument('--print_freq', default=100, type=int) + parser.add_argument('--save_freq', default=10, type=int, help='save checkpoint every N epochs (rank0 only)') + parser.add_argument('--amp', action='store_true', default = True,help='use automatic mixed precision (AMP)') + parser.add_argument('--grad_accum_steps', default=1, type=int, help='gradient accumulation steps') + parser.add_argument('--local_rank', default=None, type=int, help='local rank passed by torchrun (if 
any). Use -1 or None for non-distributed') + parser.add_argument('--cutmix_prob', default=0.0, type=float) + parser.add_argument('--beta', default=1.0, type=float) + parser.add_argument('--seed_sampler', default=False, action='store_true', help='set sampler epoch seeds to make deterministic distributed shuffling') + args = parser.parse_args() + args.milestones = [int(x) for x in args.milestones.split(',')] if args.milestones else [] + return args + +# ---------------------------- +# build model (优先 LocalModel) +# ---------------------------- +def build_model_with_local_priority(args, device=None): + """ + 用参数 args.arch 作为模块名导入 Model() + 如果模块不存在或没有 Model 类,则报错停止。 + """ + try: + # 动态导入模块,比如 args.arch = "rexnet" + mod = importlib.import_module(args.arch) + Model = getattr(mod, "Model") # 从模块中获取 Model 类 + except Exception as e: + raise RuntimeError( + f"无法导入模型模块 '{args.arch}' 或未找到类 Model。" + f"\n错误信息:{e}" + ) + + # 解析数据集类别数 + if args.dataset == 'cifar10': + num_classes = 10 + elif args.dataset == 'cifar100': + num_classes = 100 + else: + print(f"[ERROR] 不支持的数据集类型:{args.dataset},无法确定类别数。程序终止。") + sys.exit(1) + + + # 实例化 + try: + model = Model(num_classes) + except Exception as e: + raise RuntimeError( + f"Model() 实例化失败,请检查模型构造函数。\n错误信息:{e}" + ) + + return model + +# ---------------------------- +# Data loader factory +# ---------------------------- +def build_dataloaders(args, rank, world_size): + if args.dataset == 'cifar10' or args.dataset == 'cifar100': + mean = (0.4914, 0.4822, 0.4465) + std = (0.2470, 0.2435, 0.2616) if args.dataset == 'cifar10' else (0.2023, 0.1994, 0.2010) + # train_transform = transforms.Compose([ + # transforms.RandomCrop(32, padding=4), + # transforms.RandomHorizontalFlip(), + # transforms.ToTensor(), + # transforms.Normalize(mean, std), + # ]) + # test_transform = transforms.Compose([ + # transforms.ToTensor(), + # transforms.Normalize(mean, std), + # ]) + + train_transform = transforms.Compose([ # 2025/12/3 从visformer模型开始 + 
transforms.Resize(256), # 先放大到 256 + transforms.RandomCrop(224), # 再随机裁剪为 224(更符合 ImageNet 风格增强) + transforms.RandomHorizontalFlip(), + transforms.ToTensor(), + transforms.Normalize(mean, std), + ]) + test_transform = transforms.Compose([ + transforms.Resize(256), + transforms.CenterCrop(224), + transforms.ToTensor(), + transforms.Normalize(mean, std), + ]) + root = args.datapath + if args.dataset == 'cifar10': + train_set = datasets.CIFAR10(root=root, train=True, download=False, transform=train_transform) + val_set = datasets.CIFAR10(root=root, train=False, download=False, transform=test_transform) + num_classes = 10 + else: + train_set = datasets.CIFAR100(root=root, train=True, download=False, transform=train_transform) + val_set = datasets.CIFAR100(root=root, train=False, download=False, transform=test_transform) + num_classes = 100 + + elif args.dataset == 'imagenet': + train_dir = os.path.join(args.imagenet_dir, 'train') + val_dir = os.path.join(args.imagenet_dir, 'val') + train_transform = transforms.Compose([ + transforms.RandomResizedCrop(224), + transforms.RandomHorizontalFlip(), + transforms.ToTensor(), + transforms.Normalize((0.485,0.456,0.406), (0.229,0.224,0.225)), + ]) + test_transform = transforms.Compose([ + transforms.Resize(256), + transforms.CenterCrop(224), + transforms.ToTensor(), + transforms.Normalize((0.485,0.456,0.406), (0.229,0.224,0.225)), + ]) + train_set = datasets.ImageFolder(train_dir, train_transform) + val_set = datasets.ImageFolder(val_dir, test_transform) + num_classes = args.num_classes or 1000 + + elif args.dataset == 'custom': + train_dir = os.path.join(args.datapath, 'train') + val_dir = args.custom_eval_dir or os.path.join(args.datapath, 'val') + train_transform = transforms.Compose([ + transforms.RandomResizedCrop(224), + transforms.RandomHorizontalFlip(), + transforms.ToTensor(), + ]) + test_transform = transforms.Compose([ + transforms.Resize(256), + transforms.CenterCrop(224), + transforms.ToTensor(), + ]) + train_set = 
datasets.ImageFolder(train_dir, train_transform) + val_set = datasets.ImageFolder(val_dir, test_transform) + num_classes = len(train_set.classes) + else: + raise ValueError("Unknown dataset") + + if dist.is_initialized() and world_size > 1: + train_sampler = DistributedSampler(train_set, num_replicas=world_size, rank=rank, shuffle=True) + else: + train_sampler = None + + train_loader = DataLoader(train_set, + batch_size=args.batch_size, + shuffle=(train_sampler is None), + num_workers=args.num_workers, + pin_memory=True, + sampler=train_sampler, + drop_last=False) + val_loader = DataLoader(val_set, + batch_size=args.batch_size, + shuffle=False, + num_workers=args.num_workers, + pin_memory=True) + + return train_loader, val_loader, num_classes, train_sampler + +# ---------------------------- +# Train & validate +# ---------------------------- +def train_one_epoch(args, epoch, model, criterion, optimizer, train_loader, device, scaler, scheduler=None, train_sampler=None, global_step_start=0, max_global_steps=None): + """ + 现在支持:若 max_global_steps 非 None,则当 global_step 达到该值时提前退出 + 返回: epoch_summary_dict, step_logs_list, global_step_end + step_logs_list: list of dicts with per-step info (for logging to CSV if需要) + """ + batch_time = AverageMeter('Time') + data_time = AverageMeter('Data') + losses = AverageMeter('Loss') + top1 = AverageMeter('Acc@1') + top5 = AverageMeter('Acc@5') + + model.train() + end = time.time() + optimizer.zero_grad() + + iters = len(train_loader) + step_logs = [] + global_step = global_step_start + + for i, (images, targets) in enumerate(train_loader): + # check global steps limit + if (max_global_steps is not None) and (global_step >= max_global_steps): + break + + data_time.update(time.time() - end) + images = images.to(device, non_blocking=True) + targets = targets.to(device, non_blocking=True) + + if args.amp: + with amp.autocast(): + outputs = model(images) + loss = criterion(outputs, targets) / args.grad_accum_steps + else: + outputs = 
model(images) + loss = criterion(outputs, targets) / args.grad_accum_steps + + if args.amp: + scaler.scale(loss).backward() + else: + loss.backward() + + # 每当累积步满足 grad_accum_steps 就 step + if (i + 1) % args.grad_accum_steps == 0: + if args.amp: + scaler.step(optimizer) + scaler.update() + else: + optimizer.step() + optimizer.zero_grad() + if scheduler is not None and args.scheduler_step_per_batch: + scheduler.step() + + with torch.no_grad(): + acc1, acc5 = accuracy(outputs, targets, topk=(1,5)) + losses.update(loss.item() * args.grad_accum_steps, images.size(0)) + top1.update(acc1.item(), images.size(0)) + top5.update(acc5.item(), images.size(0)) + + batch_time.update(time.time() - end) + end = time.time() + + # increment global step AFTER processing this batch + global_step += 1 + + # per-step print (controlled by print_freq) + if ((global_step % args.print_freq == 0) or (i == iters - 1)) and ((dist.get_rank() if dist.is_initialized() else 0) == 0): + lr = optimizer.param_groups[0]['lr'] + print(f"Epoch[{epoch}]:step[{i+1}/{iters}] step_train_loss {losses.val:.4f} acc1 {top1.val:.2f} acc5 {top5.val:.2f}") + + # collect per-step log + step_logs.append({ + 'epoch': epoch, + 'batch_idx': i, + 'global_step': global_step, + 'lr': optimizer.param_groups[0]['lr'], + 'loss': losses.val, + 'loss_avg': losses.avg, + 'acc1': top1.val, + 'acc1_avg': top1.avg, + 'acc5': top5.val, + 'acc5_avg': top5.avg, + 'time': batch_time.val + }) + + # if reached max_global_steps inside epoch, break (handled at loop start next iter) + if (max_global_steps is not None) and (global_step >= max_global_steps): + if (dist.get_rank() if dist.is_initialized() else 0) == 0: + print(f"[Info] 达到 max_global_steps={max_global_steps},将在 epoch 内提前停止。") + break + + # --- flush remaining grads if needed (handle gradient accumulation leftovers) --- + processed_batches = global_step - global_step_start # 实际处理的 batch 数 + if args.grad_accum_steps > 1 and (processed_batches % args.grad_accum_steps) != 0: + # 
only step if there are gradients + grads_present = any((p.grad is not None and p.requires_grad) for p in model.parameters()) + if grads_present: + if args.amp: + try: + scaler.step(optimizer) + scaler.update() + except Exception as e: + # 防御性:若 scaler.step 因某些原因失败,尝试普通 step(只在极端情况下) + print("[Warning] scaler.step 失败,尝试普通 optimizer.step():", e) + optimizer.step() + else: + optimizer.step() + optimizer.zero_grad() + if scheduler is not None and args.scheduler_step_per_batch: + scheduler.step() + if (dist.get_rank() if dist.is_initialized() else 0) == 0: + print(f"[Info] flushed remaining gradients after early stop (processed_batches={processed_batches}, grad_accum={args.grad_accum_steps}).") + + if scheduler is not None and not args.scheduler_step_per_batch: + scheduler.step() + + return OrderedDict([('loss', losses.avg), ('acc1', top1.avg), ('acc5', top5.avg)]), step_logs, global_step + +def validate(args, model, val_loader, criterion, device, max_batches=None): + """ + Validate on the val_loader. + If max_batches is not None, only process up to that many batches (useful for quick checks). + Returns an OrderedDict with loss/acc1/acc5 (averaged over processed samples). 
+ """ + losses = AverageMeter('Loss') + top1 = AverageMeter('Acc@1') + top5 = AverageMeter('Acc@5') + + model.eval() + processed_batches = 0 + processed_samples = 0 + with torch.no_grad(): + for i, (images, targets) in enumerate(tqdm(val_loader)): + images = images.to(device, non_blocking=True) + targets = targets.to(device, non_blocking=True) + outputs = model(images) + loss = criterion(outputs, targets) + acc1, acc5 = accuracy(outputs, targets, topk=(1,5)) + batch_n = images.size(0) + losses.update(loss.item(), batch_n) + top1.update(acc1.item(), batch_n) + top5.update(acc5.item(), batch_n) + + processed_batches += 1 + processed_samples += batch_n + + if (max_batches is not None) and (processed_batches >= max_batches): + break + + # 如果没处理任何样本,避免除0(不太可能,但防御性) + if processed_samples == 0: + return OrderedDict([('loss', 0.0), ('acc1', 0.0), ('acc5', 0.0)]) + return OrderedDict([('loss', losses.avg), ('acc1', top1.avg), ('acc5', top5.avg)]) + +# ---------------------------- +# Main +# ---------------------------- +def main(): + args = parse_args() + + # handle local_rank from env if not provided + local_rank_env = os.environ.get('LOCAL_RANK', None) + if args.local_rank is None and local_rank_env is not None: + args.local_rank = int(local_rank_env) + + distributed = (args.local_rank is not None and args.local_rank != -1) + if distributed: + dist.init_process_group(backend='nccl', init_method='env://') + rank = dist.get_rank() + world_size = dist.get_world_size() + else: + rank = 0 + world_size = 1 + + if distributed: + torch.cuda.set_device(args.local_rank) + device = torch.device('cuda', args.local_rank) + else: + device = torch.device('cuda' if torch.cuda.is_available() else 'cpu') + + set_seed(args.seed + (rank if distributed else 0), deterministic=args.deterministic) + + save_dir = os.path.join('models', args.name) + if rank == 0: + os.makedirs(save_dir, exist_ok=True) + with open(os.path.join(save_dir, 'args.json'), 'w') as f: + json.dump(vars(args), f, indent=2) 
+ if distributed: + dist.barrier() + + train_loader, val_loader, auto_num_classes, train_sampler = build_dataloaders(args, rank, world_size) + if args.num_classes is None: + args.num_classes = auto_num_classes + + # 使用本地 Model 优先(LocalModel 已在文件顶部尝试导入) + model = build_model_with_local_priority(args, device) + model.to(device) + + if distributed: + model = DDP(model, device_ids=[args.local_rank], output_device=args.local_rank, find_unused_parameters=True) + + criterion = nn.CrossEntropyLoss().to(device) + params = [p for p in model.parameters() if p.requires_grad] + if args.optimizer == 'sgd': + optimizer = optim.SGD(params, lr=args.lr, momentum=args.momentum, + weight_decay=args.weight_decay, nesterov=args.nesterov) + elif args.optimizer == 'adam': + optimizer = optim.Adam(params, lr=args.lr, weight_decay=args.weight_decay) + elif args.optimizer == 'adamw': + optimizer = optim.AdamW(params, lr=args.lr, weight_decay=args.weight_decay) + else: + raise ValueError('Unknown optimizer') + + scheduler = None + if args.scheduler == 'multistep': + scheduler = optim.lr_scheduler.MultiStepLR(optimizer, milestones=args.milestones, gamma=args.gamma) + elif args.scheduler == 'step': + scheduler = optim.lr_scheduler.StepLR(optimizer, step_size=args.step_size, gamma=args.gamma) + elif args.scheduler == 'cosine': + scheduler = optim.lr_scheduler.CosineAnnealingLR(optimizer, T_max=args.epochs) + elif args.scheduler == 'none': + scheduler = None + + scaler = amp.GradScaler() if args.amp else None + + start_epoch = args.start_epoch + best_acc = 0.0 + if args.resume: + if os.path.isfile(args.resume): + ckpt = torch.load(args.resume, map_location='cpu') + model_state = ckpt.get('state_dict', ckpt) + if isinstance(model, DDP): + model.module.load_state_dict(model_state) + else: + model.load_state_dict(model_state) + if 'optimizer' in ckpt: + optimizer.load_state_dict(ckpt['optimizer']) + start_epoch = ckpt.get('epoch', start_epoch) + best_acc = ckpt.get('best_acc', best_acc) + print(f"=> 
resumed from {args.resume}, start_epoch={start_epoch}") + else: + print(f"=> resume path {args.resume} not found") + + log_columns = ['epoch', 'lr', 'loss', 'acc1', 'acc5', 'val_loss', 'val_acc1', 'val_acc5'] + log_df = pd.DataFrame(columns=log_columns) + # step-level log + step_log_columns = ['epoch', 'batch_idx', 'global_step', 'lr', 'loss', 'loss_avg', 'acc1', 'acc1_avg', 'acc5', 'acc5_avg', 'time'] + step_log_df = pd.DataFrame(columns=step_log_columns) + + total_epochs = args.epochs + # global_step计数器(训练过程中跨epoch持续) + global_step = 0 + + epoch = start_epoch + # loop until either epoch criteria or step criteria met + while True: + if train_sampler is not None: + if args.seed_sampler: + train_sampler.set_epoch(epoch + args.seed) + else: + train_sampler.set_epoch(epoch) + + if rank == 0: + print(f"==== Epoch {epoch}/{total_epochs - 1} ====") + + # 如果传入了 args.steps (>0),则把剩余允许的 step 数传给 train_one_epoch, + # 否则 max_global_steps=None(按整 epoch 执行完) + if args.steps and args.steps > 0: + max_global_steps = args.steps + else: + max_global_steps = None + + train_log, step_logs, global_step = train_one_epoch( + args, epoch, model, criterion, optimizer, train_loader, device, scaler, + scheduler, train_sampler, global_step_start=global_step, max_global_steps=max_global_steps + ) + + # 如果启用了按 steps 的模式且已经达到上限,标记需要在做一次验证后退出 + if max_global_steps is not None and global_step >= max_global_steps: + if rank == 0: + print(f"[Main] 达到 max_global_steps={max_global_steps}(global_step={global_step}),将在完成验证后退出训练。") + # 我们不 return 立刻退出;后面的 validate / 保存逻辑会执行一次,然后 main 返回/结束 + end_due_to_steps = True + else: + end_due_to_steps = False + + # 验证并记录 epoch 级别日志(如果在 step 模式下很可能在中间某个 epoch 提前结束,但我们仍做一次 validate) + val_log = validate(args, model, val_loader, criterion, device, args.batch_size) + current_lr = optimizer.param_groups[0]['lr'] + + if rank == 0: + # epoch summary print, 格式与示例对齐 + print(f"Epoch[{epoch}]: epoch_train_loss {train_log['loss']:.4f} acc1 {train_log['acc1']:.2f} acc5 
{train_log['acc5']:.2f} | " + f"val_loss {val_log['loss']:.4f} acc1 {val_log['acc1']:.2f} acc5 {val_log['acc5']:.2f} lr {current_lr:.6f}") + row = { + 'epoch': epoch, + 'lr': current_lr, + 'loss': train_log['loss'], + 'acc1': train_log['acc1'], + 'acc5': train_log['acc5'], + 'val_loss': val_log['loss'], + 'val_acc1': val_log['acc1'], + 'val_acc5': val_log['acc5'], + } + new_row_df = pd.DataFrame([row]) + log_df = pd.concat([log_df, new_row_df], ignore_index=True) + log_df.to_csv(os.path.join(save_dir, 'log.csv'), index=False) + + is_best = val_log['acc1'] > best_acc + if is_best: + best_acc = val_log['acc1'] + if (epoch % args.save_freq == 0) or is_best or ( (max_global_steps is None) and (epoch == total_epochs - 1) ) : + state = { + 'epoch': epoch, + 'state_dict': model.module.state_dict() if isinstance(model, DDP) else model.state_dict(), + 'best_acc': best_acc, + 'optimizer': optimizer.state_dict(), + 'args': vars(args) + } + save_checkpoint(state, is_best, save_dir, filename=f'checkpoint_epoch_{epoch}.pth') + + # 如果是因为 steps 模式达到上限,则在完成 validation / 保存后退出训练 + if end_due_to_steps: + if rank == 0: + print(f"[Main] 已在 steps 模式下完成最后一次验证并保存,训练结束(global_step={global_step})。") + break + + # increment epoch + epoch += 1 + + # stopping conditions: + # 1) if steps mode enabled and reached steps -> stop + if args.steps and args.steps > 0: + if global_step >= args.steps: + if rank == 0: + print(f"[Main] 已达到指定 steps={args.steps}(global_step={global_step}),训练结束。") + break + + # 2) if steps not used, stop when epoch >= epochs + else: + if epoch >= total_epochs: + if rank == 0: + print(f"[Main] 已达到指定 epochs={total_epochs}(epoch={epoch}),训练结束。") + break + + if dist.is_initialized(): + dist.barrier() + if rank == 0: + print("Training finished. 
Best val acc1: {:.2f}".format(best_acc)) + +if __name__ == '__main__': + main() \ No newline at end of file From 5153c8d0a34fd9a574f0411ab3c673f745ed835e Mon Sep 17 00:00:00 2001 From: wangwl Date: Wed, 7 Jan 2026 06:34:14 +0000 Subject: [PATCH 2/3] fix: cleanup code and update --- .../ResNetV1bV1_5/ResNet-PyTorch/LICENSE | 201 ---------- .../ResNetV1bV1_5/ResNet-PyTorch/README.md | 155 -------- .../ResNetV1bV1_5/ResNet-PyTorch/config.py | 89 ----- .../data/ImageNet_1K_labels_map.txt | 1 - .../ResNet-PyTorch/data/README.md | 43 --- .../ResNetV1bV1_5/ResNet-PyTorch/dataset.py | 230 ------------ .../ResNet-PyTorch/figure/n01440764_36.JPEG | Bin 35135 -> 0 bytes .../ResNetV1bV1_5/ResNet-PyTorch/imgproc.py | 253 ------------- .../ResNetV1bV1_5/ResNet-PyTorch/inference.py | 125 ------- .../ResNetV1bV1_5/ResNet-PyTorch/model.py | 252 ------------- .../ResNet-PyTorch/requirements.txt | 5 - .../scripts/preprocess_imagenet.sh | 34 -- .../scripts/preprocess_mini_imagenet.py | 72 ---- .../ResNetV1bV1_5/ResNet-PyTorch/test.py | 127 ------- .../ResNetV1bV1_5/ResNet-PyTorch/train.py | 350 ------------------ .../ResNetV1bV1_5/ResNet-PyTorch/utils.py | 198 ---------- .../Classification/ResNetV1bV1_5/coverage.txt | 3 - .../Classification/ResNetV1bV1_5/readme | 65 ++++ .../ResNetV1bV1_5/requirements_exact.txt | 89 +++++ .../ResNetV1bV1_5/resnet_loss.jpg | Bin 35594 -> 0 bytes .../ResNetV1bV1_5/resnet_loss.txt | 29 -- 21 files changed, 154 insertions(+), 2167 deletions(-) delete mode 100644 PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/LICENSE delete mode 100644 PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/README.md delete mode 100644 PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/config.py delete mode 100644 PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/data/ImageNet_1K_labels_map.txt delete mode 100644 PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/data/README.md delete mode 100644 
PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/dataset.py delete mode 100644 PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/figure/n01440764_36.JPEG delete mode 100644 PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/imgproc.py delete mode 100644 PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/inference.py delete mode 100644 PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/model.py delete mode 100644 PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/requirements.txt delete mode 100644 PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/scripts/preprocess_imagenet.sh delete mode 100644 PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/scripts/preprocess_mini_imagenet.py delete mode 100644 PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/test.py delete mode 100644 PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/train.py delete mode 100644 PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/utils.py delete mode 100644 PyTorch/build-in/Classification/ResNetV1bV1_5/coverage.txt create mode 100644 PyTorch/build-in/Classification/ResNetV1bV1_5/readme create mode 100644 PyTorch/build-in/Classification/ResNetV1bV1_5/requirements_exact.txt delete mode 100644 PyTorch/build-in/Classification/ResNetV1bV1_5/resnet_loss.jpg delete mode 100644 PyTorch/build-in/Classification/ResNetV1bV1_5/resnet_loss.txt diff --git a/PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/LICENSE b/PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/LICENSE deleted file mode 100644 index deeea2d8c..000000000 --- a/PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/LICENSE +++ /dev/null @@ -1,201 +0,0 @@ -Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. 
- - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. 
- - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of 
the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. 
Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. 
- - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/README.md b/PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/README.md deleted file mode 100644 index 8ac2056a5..000000000 --- a/PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/README.md +++ /dev/null @@ -1,155 +0,0 @@ -# ResNet-PyTorch - -## Overview - -This repository contains an op-for-op PyTorch reimplementation -of [Searching for ResNet](https://arxiv.org/pdf/1512.03385v1.pdf). 
- -## Table of contents - -- [ResNet-PyTorch](#resnet-pytorch) - - [Overview](#overview) - - [Table of contents](#table-of-contents) - - [Download weights](#download-weights) - - [Download datasets](#download-datasets) - - [How Test and Train](#how-test-and-train) - - [Test](#test) - - [Train model](#train-model) - - [Resume train model](#resume-train-model) - - [Result](#result) - - [Contributing](#contributing) - - [Credit](#credit) - - [Deep Residual Learning for Image Recognition](#deep-residual-learning-for-image-recognition) - -## Download weights - -- [Google Driver](https://drive.google.com/drive/folders/17ju2HN7Y6pyPK2CC_AqnAfTOe9_3hCQ8?usp=sharing) -- [Baidu Driver](https://pan.baidu.com/s/1yNs4rqIb004-NKEdKBJtYg?pwd=llot) - -## Download datasets - -Contains MNIST, CIFAR10&CIFAR100, TinyImageNet_200, MiniImageNet_1K, ImageNet_1K, Caltech101&Caltech256 and more etc. - -- [Google Driver](https://drive.google.com/drive/folders/1f-NSpZc07Qlzhgi6EbBEI1wTkN1MxPbQ?usp=sharing) -- [Baidu Driver](https://pan.baidu.com/s/1arNM38vhDT7p4jKeD4sqwA?pwd=llot) - -Please refer to `README.md` in the `data` directory for the method of making a dataset. - -## How Test and Train - -Both training and testing only need to modify the `config.py` file. - -### Test - -- line 29: `model_arch_name` change to `resnet18`. -- line 31: `model_mean_parameters` change to `[0.485, 0.456, 0.406]`. -- line 32: `model_std_parameters` change to `[0.229, 0.224, 0.225]`. -- line 34: `model_num_classes` change to `1000`. -- line 36: `mode` change to `test`. -- line 89: `model_weights_path` change to `./results/pretrained_models/ResNet18-ImageNet_1K-57bb63e.pth.tar`. - -```bash -python3 test.py -``` - -### Train model - -- line 29: `model_arch_name` change to `resnet18`. -- line 31: `model_mean_parameters` change to `[0.485, 0.456, 0.406]`. -- line 32: `model_std_parameters` change to `[0.229, 0.224, 0.225]`. -- line 34: `model_num_classes` change to `1000`. -- line 36: `mode` change to `train`. 
-- line 50: `pretrained_model_weights_path` change to `./results/pretrained_models/ResNet18-ImageNet_1K-57bb63e.pth.tar`. - -```bash -python3 train.py -``` - -### Resume train model - -- line 29: `model_arch_name` change to `resnet18`. -- line 31: `model_mean_parameters` change to `[0.485, 0.456, 0.406]`. -- line 32: `model_std_parameters` change to `[0.229, 0.224, 0.225]`. -- line 34: `model_num_classes` change to `1000`. -- line 36: `mode` change to `train`. -- line 53: `resume` change to `./samples/resnet18-ImageNet_1K/epoch_xxx.pth.tar`. - -```bash -python3 train.py -``` - -## Result - -Source of original paper results: [https://arxiv.org/pdf/1512.03385v1.pdf](https://arxiv.org/pdf/1512.03385v1.pdf)) - -In the following table, the top-x error value in `()` indicates the result of the project, and `-` indicates no test. - -| Model | Dataset | Top-1 error (val) | Top-5 error (val) | -|:---------:|:-----------:|:------------------:|:-----------------:| -| resnet18 | ImageNet_1K | 27.88%(**30.25%**) | -(**10.93%**) | -| resnet34 | ImageNet_1K | 25.03%(**26.71%**) | 7.76%(**8.58%**) | -| resnet50 | ImageNet_1K | 22.85%(**19.65%**) | 6.71%(**4.87%**) | -| resnet101 | ImageNet_1K | 21.75%(**18.33%**) | 6.05%(**4.34%**) | -| resnet152 | ImageNet_1K | 21.43%(**17.66%**) | 5.71%(**4.08%**) | - -```bash -# Download `ResNet18-ImageNet_1K-57bb63e.pth.tar` weights to `./results/pretrained_models` -# More detail see `README.md` -python3 ./inference.py -``` - -Input: - - - -Output: - -```text -Build `resnet18` model successfully. -Load `resnet18` model weights `/ResNet-PyTorch/results/pretrained_models/ResNet18-ImageNet_1K-57bb63e.pth.tar` successfully. 
-tench, Tinca tinca (91.46%) -barracouta, snoek (7.15%) -gar, garfish, garpike, billfish, Lepisosteus osseus (0.43%) -coho, cohoe, coho salmon, blue jack, silver salmon, Oncorhynchus kisutch (0.27%) -platypus, duckbill, duckbilled platypus, duck-billed platypus, Ornithorhynchus anatinus (0.21%) -``` - -## Contributing - -If you find a bug, create a GitHub issue, or even better, submit a pull request. Similarly, if you have questions, -simply post them as GitHub issues. - -I look forward to seeing what the community does with these models! - -### Credit - -#### Deep Residual Learning for Image Recognition - -*Kaiming He, Xiangyu Zhang, Shaoqing Ren, Jian Sun* - -##### Abstract - -Deeper neural networks are more difficult to train. We present a residual learning framework to ease the training of -networks that are substantially deeper than those used previously. We explicitly reformulate the layers as learning -residual functions with reference to the layer inputs, instead of learning unreferenced functions. We provide -comprehensive empirical evidence showing that these residual networks are easier to optimize, and can gain accuracy from -considerably increased depth. On the ImageNet dataset we evaluate residual nets with a depth of up to 152 layers---8x -deeper than VGG nets but still having lower complexity. An ensemble of these residual nets achieves 3.57% error on the -ImageNet test set. This result won the 1st place on the ILSVRC 2015 classification task. We also present analysis on -CIFAR-10 with 100 and 1000 layers. -The depth of representations is of central importance for many visual recognition tasks. Solely due to our extremely -deep representations, we obtain a 28% relative improvement on the COCO object detection dataset. Deep residual nets are -foundations of our submissions to ILSVRC & COCO 2015 competitions, where we also won the 1st places on the tasks of -ImageNet detection, ImageNet localization, COCO detection, and COCO segmentation. 
- -[[Paper]](https://arxiv.org/pdf/1512.03385v1.pdf) - -```bibtex -@inproceedings{he2016deep, - title={Deep residual learning for image recognition}, - author={He, Kaiming and Zhang, Xiangyu and Ren, Shaoqing and Sun, Jian}, - booktitle={Proceedings of the IEEE conference on computer vision and pattern recognition}, - pages={770--778}, - year={2016} -} -``` \ No newline at end of file diff --git a/PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/config.py b/PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/config.py deleted file mode 100644 index 49617d1af..000000000 --- a/PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/config.py +++ /dev/null @@ -1,89 +0,0 @@ -# Copyright 2022 Dakewe Biotech Corporation. All Rights Reserved. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# ============================================================================== -import random - -import numpy as np -import torch -from torch.backends import cudnn - -# Random seed to maintain reproducible results -random.seed(0) -torch.manual_seed(0) -np.random.seed(0) -# Use GPU for training by default -device = torch.device("cuda", 0) -# Turning on when the image size does not change during training can speed up training -cudnn.benchmark = True -# Model arch name -model_arch_name = "resnet18" -# Model normalization parameters -model_mean_parameters = [0.485, 0.456, 0.406] -model_std_parameters = [0.229, 0.224, 0.225] -# Model number class -model_num_classes = 1000 -# Current configuration parameter method -mode = "train" -# Experiment name, easy to save weights and log files -exp_name = f"{model_arch_name}-ImageNet_1K" - -if mode == "train": - # Dataset address - train_image_dir = "./data/ImageNet_1K/ILSVRC2012_img_train" - valid_image_dir = "./data/ImageNet_1K/ILSVRC2012_img_val" - - image_size = 224 - batch_size = 128 - num_workers = 4 - - # The address to load the pretrained model - pretrained_model_weights_path = "./results/pretrained_models/ResNet18-ImageNet_1K-57bb63e.pth.tar" - - # Incremental training and migration training - resume = "" - - # Total num epochs - epochs = 600 - - # Loss parameters - loss_label_smoothing = 0.1 - loss_weights = 1.0 - - # Optimizer parameter - model_lr = 0.1 - model_momentum = 0.9 - model_weight_decay = 2e-05 - model_ema_decay = 0.99998 - - # Learning rate scheduler parameter - lr_scheduler_T_0 = epochs // 4 - lr_scheduler_T_mult = 1 - lr_scheduler_eta_min = 5e-5 - - # How many iterations to print the training/validate result - train_print_frequency = 200 - valid_print_frequency = 20 - -if mode == "test": - # Test data address - test_image_dir = "./data/ImageNet_1K/ILSVRC2012_img_val" - - # Test dataloader parameters - image_size = 224 - batch_size = 256 - num_workers = 4 - - # How many iterations to print the testing 
result - test_print_frequency = 20 - - model_weights_path = "./results/pretrained_models/ResNet18-ImageNet_1K-57bb63e.pth.tar" diff --git a/PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/data/ImageNet_1K_labels_map.txt b/PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/data/ImageNet_1K_labels_map.txt deleted file mode 100644 index ae7d9a9cb..000000000 --- a/PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/data/ImageNet_1K_labels_map.txt +++ /dev/null @@ -1 +0,0 @@ -{"0": "tench, Tinca tinca", "1": "goldfish, Carassius auratus", "2": "great white shark, white shark, man-eater, man-eating shark, Carcharodon carcharias", "3": "tiger shark, Galeocerdo cuvieri", "4": "hammerhead, hammerhead shark", "5": "electric ray, crampfish, numbfish, torpedo", "6": "stingray", "7": "cock", "8": "hen", "9": "ostrich, Struthio camelus", "10": "brambling, Fringilla montifringilla", "11": "goldfinch, Carduelis carduelis", "12": "house finch, linnet, Carpodacus mexicanus", "13": "junco, snowbird", "14": "indigo bunting, indigo finch, indigo bird, Passerina cyanea", "15": "robin, American robin, Turdus migratorius", "16": "bulbul", "17": "jay", "18": "magpie", "19": "chickadee", "20": "water ouzel, dipper", "21": "kite", "22": "bald eagle, American eagle, Haliaeetus leucocephalus", "23": "vulture", "24": "great grey owl, great gray owl, Strix nebulosa", "25": "European fire salamander, Salamandra salamandra", "26": "common newt, Triturus vulgaris", "27": "eft", "28": "spotted salamander, Ambystoma maculatum", "29": "axolotl, mud puppy, Ambystoma mexicanum", "30": "bullfrog, Rana catesbeiana", "31": "tree frog, tree-frog", "32": "tailed frog, bell toad, ribbed toad, tailed toad, Ascaphus trui", "33": "loggerhead, loggerhead turtle, Caretta caretta", "34": "leatherback turtle, leatherback, leathery turtle, Dermochelys coriacea", "35": "mud turtle", "36": "terrapin", "37": "box turtle, box tortoise", "38": "banded gecko", "39": "common iguana, iguana, 
Iguana iguana", "40": "American chameleon, anole, Anolis carolinensis", "41": "whiptail, whiptail lizard", "42": "agama", "43": "frilled lizard, Chlamydosaurus kingi", "44": "alligator lizard", "45": "Gila monster, Heloderma suspectum", "46": "green lizard, Lacerta viridis", "47": "African chameleon, Chamaeleo chamaeleon", "48": "Komodo dragon, Komodo lizard, dragon lizard, giant lizard, Varanus komodoensis", "49": "African crocodile, Nile crocodile, Crocodylus niloticus", "50": "American alligator, Alligator mississipiensis", "51": "triceratops", "52": "thunder snake, worm snake, Carphophis amoenus", "53": "ringneck snake, ring-necked snake, ring snake", "54": "hognose snake, puff adder, sand viper", "55": "green snake, grass snake", "56": "king snake, kingsnake", "57": "garter snake, grass snake", "58": "water snake", "59": "vine snake", "60": "night snake, Hypsiglena torquata", "61": "boa constrictor, Constrictor constrictor", "62": "rock python, rock snake, Python sebae", "63": "Indian cobra, Naja naja", "64": "green mamba", "65": "sea snake", "66": "horned viper, cerastes, sand viper, horned asp, Cerastes cornutus", "67": "diamondback, diamondback rattlesnake, Crotalus adamanteus", "68": "sidewinder, horned rattlesnake, Crotalus cerastes", "69": "trilobite", "70": "harvestman, daddy longlegs, Phalangium opilio", "71": "scorpion", "72": "black and gold garden spider, Argiope aurantia", "73": "barn spider, Araneus cavaticus", "74": "garden spider, Aranea diademata", "75": "black widow, Latrodectus mactans", "76": "tarantula", "77": "wolf spider, hunting spider", "78": "tick", "79": "centipede", "80": "black grouse", "81": "ptarmigan", "82": "ruffed grouse, partridge, Bonasa umbellus", "83": "prairie chicken, prairie grouse, prairie fowl", "84": "peacock", "85": "quail", "86": "partridge", "87": "African grey, African gray, Psittacus erithacus", "88": "macaw", "89": "sulphur-crested cockatoo, Kakatoe galerita, Cacatua galerita", "90": "lorikeet", "91": "coucal", 
"92": "bee eater", "93": "hornbill", "94": "hummingbird", "95": "jacamar", "96": "toucan", "97": "drake", "98": "red-breasted merganser, Mergus serrator", "99": "goose", "100": "black swan, Cygnus atratus", "101": "tusker", "102": "echidna, spiny anteater, anteater", "103": "platypus, duckbill, duckbilled platypus, duck-billed platypus, Ornithorhynchus anatinus", "104": "wallaby, brush kangaroo", "105": "koala, koala bear, kangaroo bear, native bear, Phascolarctos cinereus", "106": "wombat", "107": "jellyfish", "108": "sea anemone, anemone", "109": "brain coral", "110": "flatworm, platyhelminth", "111": "nematode, nematode worm, roundworm", "112": "conch", "113": "snail", "114": "slug", "115": "sea slug, nudibranch", "116": "chiton, coat-of-mail shell, sea cradle, polyplacophore", "117": "chambered nautilus, pearly nautilus, nautilus", "118": "Dungeness crab, Cancer magister", "119": "rock crab, Cancer irroratus", "120": "fiddler crab", "121": "king crab, Alaska crab, Alaskan king crab, Alaska king crab, Paralithodes camtschatica", "122": "American lobster, Northern lobster, Maine lobster, Homarus americanus", "123": "spiny lobster, langouste, rock lobster, crawfish, crayfish, sea crawfish", "124": "crayfish, crawfish, crawdad, crawdaddy", "125": "hermit crab", "126": "isopod", "127": "white stork, Ciconia ciconia", "128": "black stork, Ciconia nigra", "129": "spoonbill", "130": "flamingo", "131": "little blue heron, Egretta caerulea", "132": "American egret, great white heron, Egretta albus", "133": "bittern", "134": "crane", "135": "limpkin, Aramus pictus", "136": "European gallinule, Porphyrio porphyrio", "137": "American coot, marsh hen, mud hen, water hen, Fulica americana", "138": "bustard", "139": "ruddy turnstone, Arenaria interpres", "140": "red-backed sandpiper, dunlin, Erolia alpina", "141": "redshank, Tringa totanus", "142": "dowitcher", "143": "oystercatcher, oyster catcher", "144": "pelican", "145": "king penguin, Aptenodytes patagonica", "146": 
"albatross, mollymawk", "147": "grey whale, gray whale, devilfish, Eschrichtius gibbosus, Eschrichtius robustus", "148": "killer whale, killer, orca, grampus, sea wolf, Orcinus orca", "149": "dugong, Dugong dugon", "150": "sea lion", "151": "Chihuahua", "152": "Japanese spaniel", "153": "Maltese dog, Maltese terrier, Maltese", "154": "Pekinese, Pekingese, Peke", "155": "Shih-Tzu", "156": "Blenheim spaniel", "157": "papillon", "158": "toy terrier", "159": "Rhodesian ridgeback", "160": "Afghan hound, Afghan", "161": "basset, basset hound", "162": "beagle", "163": "bloodhound, sleuthhound", "164": "bluetick", "165": "black-and-tan coonhound", "166": "Walker hound, Walker foxhound", "167": "English foxhound", "168": "redbone", "169": "borzoi, Russian wolfhound", "170": "Irish wolfhound", "171": "Italian greyhound", "172": "whippet", "173": "Ibizan hound, Ibizan Podenco", "174": "Norwegian elkhound, elkhound", "175": "otterhound, otter hound", "176": "Saluki, gazelle hound", "177": "Scottish deerhound, deerhound", "178": "Weimaraner", "179": "Staffordshire bullterrier, Staffordshire bull terrier", "180": "American Staffordshire terrier, Staffordshire terrier, American pit bull terrier, pit bull terrier", "181": "Bedlington terrier", "182": "Border terrier", "183": "Kerry blue terrier", "184": "Irish terrier", "185": "Norfolk terrier", "186": "Norwich terrier", "187": "Yorkshire terrier", "188": "wire-haired fox terrier", "189": "Lakeland terrier", "190": "Sealyham terrier, Sealyham", "191": "Airedale, Airedale terrier", "192": "cairn, cairn terrier", "193": "Australian terrier", "194": "Dandie Dinmont, Dandie Dinmont terrier", "195": "Boston bull, Boston terrier", "196": "miniature schnauzer", "197": "giant schnauzer", "198": "standard schnauzer", "199": "Scotch terrier, Scottish terrier, Scottie", "200": "Tibetan terrier, chrysanthemum dog", "201": "silky terrier, Sydney silky", "202": "soft-coated wheaten terrier", "203": "West Highland white terrier", "204": "Lhasa, 
Lhasa apso", "205": "flat-coated retriever", "206": "curly-coated retriever", "207": "golden retriever", "208": "Labrador retriever", "209": "Chesapeake Bay retriever", "210": "German short-haired pointer", "211": "vizsla, Hungarian pointer", "212": "English setter", "213": "Irish setter, red setter", "214": "Gordon setter", "215": "Brittany spaniel", "216": "clumber, clumber spaniel", "217": "English springer, English springer spaniel", "218": "Welsh springer spaniel", "219": "cocker spaniel, English cocker spaniel, cocker", "220": "Sussex spaniel", "221": "Irish water spaniel", "222": "kuvasz", "223": "schipperke", "224": "groenendael", "225": "malinois", "226": "briard", "227": "kelpie", "228": "komondor", "229": "Old English sheepdog, bobtail", "230": "Shetland sheepdog, Shetland sheep dog, Shetland", "231": "collie", "232": "Border collie", "233": "Bouvier des Flandres, Bouviers des Flandres", "234": "Rottweiler", "235": "German shepherd, German shepherd dog, German police dog, alsatian", "236": "Doberman, Doberman pinscher", "237": "miniature pinscher", "238": "Greater Swiss Mountain dog", "239": "Bernese mountain dog", "240": "Appenzeller", "241": "EntleBucher", "242": "boxer", "243": "bull mastiff", "244": "Tibetan mastiff", "245": "French bulldog", "246": "Great Dane", "247": "Saint Bernard, St Bernard", "248": "Eskimo dog, husky", "249": "malamute, malemute, Alaskan malamute", "250": "Siberian husky", "251": "dalmatian, coach dog, carriage dog", "252": "affenpinscher, monkey pinscher, monkey dog", "253": "basenji", "254": "pug, pug-dog", "255": "Leonberg", "256": "Newfoundland, Newfoundland dog", "257": "Great Pyrenees", "258": "Samoyed, Samoyede", "259": "Pomeranian", "260": "chow, chow chow", "261": "keeshond", "262": "Brabancon griffon", "263": "Pembroke, Pembroke Welsh corgi", "264": "Cardigan, Cardigan Welsh corgi", "265": "toy poodle", "266": "miniature poodle", "267": "standard poodle", "268": "Mexican hairless", "269": "timber wolf, grey wolf, 
gray wolf, Canis lupus", "270": "white wolf, Arctic wolf, Canis lupus tundrarum", "271": "red wolf, maned wolf, Canis rufus, Canis niger", "272": "coyote, prairie wolf, brush wolf, Canis latrans", "273": "dingo, warrigal, warragal, Canis dingo", "274": "dhole, Cuon alpinus", "275": "African hunting dog, hyena dog, Cape hunting dog, Lycaon pictus", "276": "hyena, hyaena", "277": "red fox, Vulpes vulpes", "278": "kit fox, Vulpes macrotis", "279": "Arctic fox, white fox, Alopex lagopus", "280": "grey fox, gray fox, Urocyon cinereoargenteus", "281": "tabby, tabby cat", "282": "tiger cat", "283": "Persian cat", "284": "Siamese cat, Siamese", "285": "Egyptian cat", "286": "cougar, puma, catamount, mountain lion, painter, panther, Felis concolor", "287": "lynx, catamount", "288": "leopard, Panthera pardus", "289": "snow leopard, ounce, Panthera uncia", "290": "jaguar, panther, Panthera onca, Felis onca", "291": "lion, king of beasts, Panthera leo", "292": "tiger, Panthera tigris", "293": "cheetah, chetah, Acinonyx jubatus", "294": "brown bear, bruin, Ursus arctos", "295": "American black bear, black bear, Ursus americanus, Euarctos americanus", "296": "ice bear, polar bear, Ursus Maritimus, Thalarctos maritimus", "297": "sloth bear, Melursus ursinus, Ursus ursinus", "298": "mongoose", "299": "meerkat, mierkat", "300": "tiger beetle", "301": "ladybug, ladybeetle, lady beetle, ladybird, ladybird beetle", "302": "ground beetle, carabid beetle", "303": "long-horned beetle, longicorn, longicorn beetle", "304": "leaf beetle, chrysomelid", "305": "dung beetle", "306": "rhinoceros beetle", "307": "weevil", "308": "fly", "309": "bee", "310": "ant, emmet, pismire", "311": "grasshopper, hopper", "312": "cricket", "313": "walking stick, walkingstick, stick insect", "314": "cockroach, roach", "315": "mantis, mantid", "316": "cicada, cicala", "317": "leafhopper", "318": "lacewing, lacewing fly", "319": "dragonfly, darning needle, devil's darning needle, sewing needle, snake feeder, 
snake doctor, mosquito hawk, skeeter hawk", "320": "damselfly", "321": "admiral", "322": "ringlet, ringlet butterfly", "323": "monarch, monarch butterfly, milkweed butterfly, Danaus plexippus", "324": "cabbage butterfly", "325": "sulphur butterfly, sulfur butterfly", "326": "lycaenid, lycaenid butterfly", "327": "starfish, sea star", "328": "sea urchin", "329": "sea cucumber, holothurian", "330": "wood rabbit, cottontail, cottontail rabbit", "331": "hare", "332": "Angora, Angora rabbit", "333": "hamster", "334": "porcupine, hedgehog", "335": "fox squirrel, eastern fox squirrel, Sciurus niger", "336": "marmot", "337": "beaver", "338": "guinea pig, Cavia cobaya", "339": "sorrel", "340": "zebra", "341": "hog, pig, grunter, squealer, Sus scrofa", "342": "wild boar, boar, Sus scrofa", "343": "warthog", "344": "hippopotamus, hippo, river horse, Hippopotamus amphibius", "345": "ox", "346": "water buffalo, water ox, Asiatic buffalo, Bubalus bubalis", "347": "bison", "348": "ram, tup", "349": "bighorn, bighorn sheep, cimarron, Rocky Mountain bighorn, Rocky Mountain sheep, Ovis canadensis", "350": "ibex, Capra ibex", "351": "hartebeest", "352": "impala, Aepyceros melampus", "353": "gazelle", "354": "Arabian camel, dromedary, Camelus dromedarius", "355": "llama", "356": "weasel", "357": "mink", "358": "polecat, fitch, foulmart, foumart, Mustela putorius", "359": "black-footed ferret, ferret, Mustela nigripes", "360": "otter", "361": "skunk, polecat, wood pussy", "362": "badger", "363": "armadillo", "364": "three-toed sloth, ai, Bradypus tridactylus", "365": "orangutan, orang, orangutang, Pongo pygmaeus", "366": "gorilla, Gorilla gorilla", "367": "chimpanzee, chimp, Pan troglodytes", "368": "gibbon, Hylobates lar", "369": "siamang, Hylobates syndactylus, Symphalangus syndactylus", "370": "guenon, guenon monkey", "371": "patas, hussar monkey, Erythrocebus patas", "372": "baboon", "373": "macaque", "374": "langur", "375": "colobus, colobus monkey", "376": "proboscis monkey, 
Nasalis larvatus", "377": "marmoset", "378": "capuchin, ringtail, Cebus capucinus", "379": "howler monkey, howler", "380": "titi, titi monkey", "381": "spider monkey, Ateles geoffroyi", "382": "squirrel monkey, Saimiri sciureus", "383": "Madagascar cat, ring-tailed lemur, Lemur catta", "384": "indri, indris, Indri indri, Indri brevicaudatus", "385": "Indian elephant, Elephas maximus", "386": "African elephant, Loxodonta africana", "387": "lesser panda, red panda, panda, bear cat, cat bear, Ailurus fulgens", "388": "giant panda, panda, panda bear, coon bear, Ailuropoda melanoleuca", "389": "barracouta, snoek", "390": "eel", "391": "coho, cohoe, coho salmon, blue jack, silver salmon, Oncorhynchus kisutch", "392": "rock beauty, Holocanthus tricolor", "393": "anemone fish", "394": "sturgeon", "395": "gar, garfish, garpike, billfish, Lepisosteus osseus", "396": "lionfish", "397": "puffer, pufferfish, blowfish, globefish", "398": "abacus", "399": "abaya", "400": "academic gown, academic robe, judge's robe", "401": "accordion, piano accordion, squeeze box", "402": "acoustic guitar", "403": "aircraft carrier, carrier, flattop, attack aircraft carrier", "404": "airliner", "405": "airship, dirigible", "406": "altar", "407": "ambulance", "408": "amphibian, amphibious vehicle", "409": "analog clock", "410": "apiary, bee house", "411": "apron", "412": "ashcan, trash can, garbage can, wastebin, ash bin, ash-bin, ashbin, dustbin, trash barrel, trash bin", "413": "assault rifle, assault gun", "414": "backpack, back pack, knapsack, packsack, rucksack, haversack", "415": "bakery, bakeshop, bakehouse", "416": "balance beam, beam", "417": "balloon", "418": "ballpoint, ballpoint pen, ballpen, Biro", "419": "Band Aid", "420": "banjo", "421": "bannister, banister, balustrade, balusters, handrail", "422": "barbell", "423": "barber chair", "424": "barbershop", "425": "barn", "426": "barometer", "427": "barrel, cask", "428": "barrow, garden cart, lawn cart, wheelbarrow", "429": "baseball", 
"430": "basketball", "431": "bassinet", "432": "bassoon", "433": "bathing cap, swimming cap", "434": "bath towel", "435": "bathtub, bathing tub, bath, tub", "436": "beach wagon, station wagon, wagon, estate car, beach waggon, station waggon, waggon", "437": "beacon, lighthouse, beacon light, pharos", "438": "beaker", "439": "bearskin, busby, shako", "440": "beer bottle", "441": "beer glass", "442": "bell cote, bell cot", "443": "bib", "444": "bicycle-built-for-two, tandem bicycle, tandem", "445": "bikini, two-piece", "446": "binder, ring-binder", "447": "binoculars, field glasses, opera glasses", "448": "birdhouse", "449": "boathouse", "450": "bobsled, bobsleigh, bob", "451": "bolo tie, bolo, bola tie, bola", "452": "bonnet, poke bonnet", "453": "bookcase", "454": "bookshop, bookstore, bookstall", "455": "bottlecap", "456": "bow", "457": "bow tie, bow-tie, bowtie", "458": "brass, memorial tablet, plaque", "459": "brassiere, bra, bandeau", "460": "breakwater, groin, groyne, mole, bulwark, seawall, jetty", "461": "breastplate, aegis, egis", "462": "broom", "463": "bucket, pail", "464": "buckle", "465": "bulletproof vest", "466": "bullet train, bullet", "467": "butcher shop, meat market", "468": "cab, hack, taxi, taxicab", "469": "caldron, cauldron", "470": "candle, taper, wax light", "471": "cannon", "472": "canoe", "473": "can opener, tin opener", "474": "cardigan", "475": "car mirror", "476": "carousel, carrousel, merry-go-round, roundabout, whirligig", "477": "carpenter's kit, tool kit", "478": "carton", "479": "car wheel", "480": "cash machine, cash dispenser, automated teller machine, automatic teller machine, automated teller, automatic teller, ATM", "481": "cassette", "482": "cassette player", "483": "castle", "484": "catamaran", "485": "CD player", "486": "cello, violoncello", "487": "cellular telephone, cellular phone, cellphone, cell, mobile phone", "488": "chain", "489": "chainlink fence", "490": "chain mail, ring mail, mail, chain armor, chain armour, 
ring armor, ring armour", "491": "chain saw, chainsaw", "492": "chest", "493": "chiffonier, commode", "494": "chime, bell, gong", "495": "china cabinet, china closet", "496": "Christmas stocking", "497": "church, church building", "498": "cinema, movie theater, movie theatre, movie house, picture palace", "499": "cleaver, meat cleaver, chopper", "500": "cliff dwelling", "501": "cloak", "502": "clog, geta, patten, sabot", "503": "cocktail shaker", "504": "coffee mug", "505": "coffeepot", "506": "coil, spiral, volute, whorl, helix", "507": "combination lock", "508": "computer keyboard, keypad", "509": "confectionery, confectionary, candy store", "510": "container ship, containership, container vessel", "511": "convertible", "512": "corkscrew, bottle screw", "513": "cornet, horn, trumpet, trump", "514": "cowboy boot", "515": "cowboy hat, ten-gallon hat", "516": "cradle", "517": "crane", "518": "crash helmet", "519": "crate", "520": "crib, cot", "521": "Crock Pot", "522": "croquet ball", "523": "crutch", "524": "cuirass", "525": "dam, dike, dyke", "526": "desk", "527": "desktop computer", "528": "dial telephone, dial phone", "529": "diaper, nappy, napkin", "530": "digital clock", "531": "digital watch", "532": "dining table, board", "533": "dishrag, dishcloth", "534": "dishwasher, dish washer, dishwashing machine", "535": "disk brake, disc brake", "536": "dock, dockage, docking facility", "537": "dogsled, dog sled, dog sleigh", "538": "dome", "539": "doormat, welcome mat", "540": "drilling platform, offshore rig", "541": "drum, membranophone, tympan", "542": "drumstick", "543": "dumbbell", "544": "Dutch oven", "545": "electric fan, blower", "546": "electric guitar", "547": "electric locomotive", "548": "entertainment center", "549": "envelope", "550": "espresso maker", "551": "face powder", "552": "feather boa, boa", "553": "file, file cabinet, filing cabinet", "554": "fireboat", "555": "fire engine, fire truck", "556": "fire screen, fireguard", "557": "flagpole, 
flagstaff", "558": "flute, transverse flute", "559": "folding chair", "560": "football helmet", "561": "forklift", "562": "fountain", "563": "fountain pen", "564": "four-poster", "565": "freight car", "566": "French horn, horn", "567": "frying pan, frypan, skillet", "568": "fur coat", "569": "garbage truck, dustcart", "570": "gasmask, respirator, gas helmet", "571": "gas pump, gasoline pump, petrol pump, island dispenser", "572": "goblet", "573": "go-kart", "574": "golf ball", "575": "golfcart, golf cart", "576": "gondola", "577": "gong, tam-tam", "578": "gown", "579": "grand piano, grand", "580": "greenhouse, nursery, glasshouse", "581": "grille, radiator grille", "582": "grocery store, grocery, food market, market", "583": "guillotine", "584": "hair slide", "585": "hair spray", "586": "half track", "587": "hammer", "588": "hamper", "589": "hand blower, blow dryer, blow drier, hair dryer, hair drier", "590": "hand-held computer, hand-held microcomputer", "591": "handkerchief, hankie, hanky, hankey", "592": "hard disc, hard disk, fixed disk", "593": "harmonica, mouth organ, harp, mouth harp", "594": "harp", "595": "harvester, reaper", "596": "hatchet", "597": "holster", "598": "home theater, home theatre", "599": "honeycomb", "600": "hook, claw", "601": "hoopskirt, crinoline", "602": "horizontal bar, high bar", "603": "horse cart, horse-cart", "604": "hourglass", "605": "iPod", "606": "iron, smoothing iron", "607": "jack-o'-lantern", "608": "jean, blue jean, denim", "609": "jeep, landrover", "610": "jersey, T-shirt, tee shirt", "611": "jigsaw puzzle", "612": "jinrikisha, ricksha, rickshaw", "613": "joystick", "614": "kimono", "615": "knee pad", "616": "knot", "617": "lab coat, laboratory coat", "618": "ladle", "619": "lampshade, lamp shade", "620": "laptop, laptop computer", "621": "lawn mower, mower", "622": "lens cap, lens cover", "623": "letter opener, paper knife, paperknife", "624": "library", "625": "lifeboat", "626": "lighter, light, igniter, ignitor", 
"627": "limousine, limo", "628": "liner, ocean liner", "629": "lipstick, lip rouge", "630": "Loafer", "631": "lotion", "632": "loudspeaker, speaker, speaker unit, loudspeaker system, speaker system", "633": "loupe, jeweler's loupe", "634": "lumbermill, sawmill", "635": "magnetic compass", "636": "mailbag, postbag", "637": "mailbox, letter box", "638": "maillot", "639": "maillot, tank suit", "640": "manhole cover", "641": "maraca", "642": "marimba, xylophone", "643": "mask", "644": "matchstick", "645": "maypole", "646": "maze, labyrinth", "647": "measuring cup", "648": "medicine chest, medicine cabinet", "649": "megalith, megalithic structure", "650": "microphone, mike", "651": "microwave, microwave oven", "652": "military uniform", "653": "milk can", "654": "minibus", "655": "miniskirt, mini", "656": "minivan", "657": "missile", "658": "mitten", "659": "mixing bowl", "660": "mobile home, manufactured home", "661": "Model T", "662": "modem", "663": "monastery", "664": "monitor", "665": "moped", "666": "mortar", "667": "mortarboard", "668": "mosque", "669": "mosquito net", "670": "motor scooter, scooter", "671": "mountain bike, all-terrain bike, off-roader", "672": "mountain tent", "673": "mouse, computer mouse", "674": "mousetrap", "675": "moving van", "676": "muzzle", "677": "nail", "678": "neck brace", "679": "necklace", "680": "nipple", "681": "notebook, notebook computer", "682": "obelisk", "683": "oboe, hautboy, hautbois", "684": "ocarina, sweet potato", "685": "odometer, hodometer, mileometer, milometer", "686": "oil filter", "687": "organ, pipe organ", "688": "oscilloscope, scope, cathode-ray oscilloscope, CRO", "689": "overskirt", "690": "oxcart", "691": "oxygen mask", "692": "packet", "693": "paddle, boat paddle", "694": "paddlewheel, paddle wheel", "695": "padlock", "696": "paintbrush", "697": "pajama, pyjama, pj's, jammies", "698": "palace", "699": "panpipe, pandean pipe, syrinx", "700": "paper towel", "701": "parachute, chute", "702": "parallel bars, 
bars", "703": "park bench", "704": "parking meter", "705": "passenger car, coach, carriage", "706": "patio, terrace", "707": "pay-phone, pay-station", "708": "pedestal, plinth, footstall", "709": "pencil box, pencil case", "710": "pencil sharpener", "711": "perfume, essence", "712": "Petri dish", "713": "photocopier", "714": "pick, plectrum, plectron", "715": "pickelhaube", "716": "picket fence, paling", "717": "pickup, pickup truck", "718": "pier", "719": "piggy bank, penny bank", "720": "pill bottle", "721": "pillow", "722": "ping-pong ball", "723": "pinwheel", "724": "pirate, pirate ship", "725": "pitcher, ewer", "726": "plane, carpenter's plane, woodworking plane", "727": "planetarium", "728": "plastic bag", "729": "plate rack", "730": "plow, plough", "731": "plunger, plumber's helper", "732": "Polaroid camera, Polaroid Land camera", "733": "pole", "734": "police van, police wagon, paddy wagon, patrol wagon, wagon, black Maria", "735": "poncho", "736": "pool table, billiard table, snooker table", "737": "pop bottle, soda bottle", "738": "pot, flowerpot", "739": "potter's wheel", "740": "power drill", "741": "prayer rug, prayer mat", "742": "printer", "743": "prison, prison house", "744": "projectile, missile", "745": "projector", "746": "puck, hockey puck", "747": "punching bag, punch bag, punching ball, punchball", "748": "purse", "749": "quill, quill pen", "750": "quilt, comforter, comfort, puff", "751": "racer, race car, racing car", "752": "racket, racquet", "753": "radiator", "754": "radio, wireless", "755": "radio telescope, radio reflector", "756": "rain barrel", "757": "recreational vehicle, RV, R.V.", "758": "reel", "759": "reflex camera", "760": "refrigerator, icebox", "761": "remote control, remote", "762": "restaurant, eating house, eating place, eatery", "763": "revolver, six-gun, six-shooter", "764": "rifle", "765": "rocking chair, rocker", "766": "rotisserie", "767": "rubber eraser, rubber, pencil eraser", "768": "rugby ball", "769": "rule, 
ruler", "770": "running shoe", "771": "safe", "772": "safety pin", "773": "saltshaker, salt shaker", "774": "sandal", "775": "sarong", "776": "sax, saxophone", "777": "scabbard", "778": "scale, weighing machine", "779": "school bus", "780": "schooner", "781": "scoreboard", "782": "screen, CRT screen", "783": "screw", "784": "screwdriver", "785": "seat belt, seatbelt", "786": "sewing machine", "787": "shield, buckler", "788": "shoe shop, shoe-shop, shoe store", "789": "shoji", "790": "shopping basket", "791": "shopping cart", "792": "shovel", "793": "shower cap", "794": "shower curtain", "795": "ski", "796": "ski mask", "797": "sleeping bag", "798": "slide rule, slipstick", "799": "sliding door", "800": "slot, one-armed bandit", "801": "snorkel", "802": "snowmobile", "803": "snowplow, snowplough", "804": "soap dispenser", "805": "soccer ball", "806": "sock", "807": "solar dish, solar collector, solar furnace", "808": "sombrero", "809": "soup bowl", "810": "space bar", "811": "space heater", "812": "space shuttle", "813": "spatula", "814": "speedboat", "815": "spider web, spider's web", "816": "spindle", "817": "sports car, sport car", "818": "spotlight, spot", "819": "stage", "820": "steam locomotive", "821": "steel arch bridge", "822": "steel drum", "823": "stethoscope", "824": "stole", "825": "stone wall", "826": "stopwatch, stop watch", "827": "stove", "828": "strainer", "829": "streetcar, tram, tramcar, trolley, trolley car", "830": "stretcher", "831": "studio couch, day bed", "832": "stupa, tope", "833": "submarine, pigboat, sub, U-boat", "834": "suit, suit of clothes", "835": "sundial", "836": "sunglass", "837": "sunglasses, dark glasses, shades", "838": "sunscreen, sunblock, sun blocker", "839": "suspension bridge", "840": "swab, swob, mop", "841": "sweatshirt", "842": "swimming trunks, bathing trunks", "843": "swing", "844": "switch, electric switch, electrical switch", "845": "syringe", "846": "table lamp", "847": "tank, army tank, armored combat vehicle, 
armoured combat vehicle", "848": "tape player", "849": "teapot", "850": "teddy, teddy bear", "851": "television, television system", "852": "tennis ball", "853": "thatch, thatched roof", "854": "theater curtain, theatre curtain", "855": "thimble", "856": "thresher, thrasher, threshing machine", "857": "throne", "858": "tile roof", "859": "toaster", "860": "tobacco shop, tobacconist shop, tobacconist", "861": "toilet seat", "862": "torch", "863": "totem pole", "864": "tow truck, tow car, wrecker", "865": "toyshop", "866": "tractor", "867": "trailer truck, tractor trailer, trucking rig, rig, articulated lorry, semi", "868": "tray", "869": "trench coat", "870": "tricycle, trike, velocipede", "871": "trimaran", "872": "tripod", "873": "triumphal arch", "874": "trolleybus, trolley coach, trackless trolley", "875": "trombone", "876": "tub, vat", "877": "turnstile", "878": "typewriter keyboard", "879": "umbrella", "880": "unicycle, monocycle", "881": "upright, upright piano", "882": "vacuum, vacuum cleaner", "883": "vase", "884": "vault", "885": "velvet", "886": "vending machine", "887": "vestment", "888": "viaduct", "889": "violin, fiddle", "890": "volleyball", "891": "waffle iron", "892": "wall clock", "893": "wallet, billfold, notecase, pocketbook", "894": "wardrobe, closet, press", "895": "warplane, military plane", "896": "washbasin, handbasin, washbowl, lavabo, wash-hand basin", "897": "washer, automatic washer, washing machine", "898": "water bottle", "899": "water jug", "900": "water tower", "901": "whiskey jug", "902": "whistle", "903": "wig", "904": "window screen", "905": "window shade", "906": "Windsor tie", "907": "wine bottle", "908": "wing", "909": "wok", "910": "wooden spoon", "911": "wool, woolen, woollen", "912": "worm fence, snake fence, snake-rail fence, Virginia fence", "913": "wreck", "914": "yawl", "915": "yurt", "916": "web site, website, internet site, site", "917": "comic book", "918": "crossword puzzle, crossword", "919": "street sign", "920": 
"traffic light, traffic signal, stoplight", "921": "book jacket, dust cover, dust jacket, dust wrapper", "922": "menu", "923": "plate", "924": "guacamole", "925": "consomme", "926": "hot pot, hotpot", "927": "trifle", "928": "ice cream, icecream", "929": "ice lolly, lolly, lollipop, popsicle", "930": "French loaf", "931": "bagel, beigel", "932": "pretzel", "933": "cheeseburger", "934": "hotdog, hot dog, red hot", "935": "mashed potato", "936": "head cabbage", "937": "broccoli", "938": "cauliflower", "939": "zucchini, courgette", "940": "spaghetti squash", "941": "acorn squash", "942": "butternut squash", "943": "cucumber, cuke", "944": "artichoke, globe artichoke", "945": "bell pepper", "946": "cardoon", "947": "mushroom", "948": "Granny Smith", "949": "strawberry", "950": "orange", "951": "lemon", "952": "fig", "953": "pineapple, ananas", "954": "banana", "955": "jackfruit, jak, jack", "956": "custard apple", "957": "pomegranate", "958": "hay", "959": "carbonara", "960": "chocolate sauce, chocolate syrup", "961": "dough", "962": "meat loaf, meatloaf", "963": "pizza, pizza pie", "964": "potpie", "965": "burrito", "966": "red wine", "967": "espresso", "968": "cup", "969": "eggnog", "970": "alp", "971": "bubble", "972": "cliff, drop, drop-off", "973": "coral reef", "974": "geyser", "975": "lakeside, lakeshore", "976": "promontory, headland, head, foreland", "977": "sandbar, sand bar", "978": "seashore, coast, seacoast, sea-coast", "979": "valley, vale", "980": "volcano", "981": "ballplayer, baseball player", "982": "groom, bridegroom", "983": "scuba diver", "984": "rapeseed", "985": "daisy", "986": "yellow lady's slipper, yellow lady-slipper, Cypripedium calceolus, Cypripedium parviflorum", "987": "corn", "988": "acorn", "989": "hip, rose hip, rosehip", "990": "buckeye, horse chestnut, conker", "991": "coral fungus", "992": "agaric", "993": "gyromitra", "994": "stinkhorn, carrion fungus", "995": "earthstar", "996": "hen-of-the-woods, hen of the woods, Polyporus 
frondosus, Grifola frondosa", "997": "bolete", "998": "ear, spike, capitulum", "999": "toilet tissue, toilet paper, bathroom tissue"} diff --git a/PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/data/README.md b/PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/data/README.md deleted file mode 100644 index dc29bf331..000000000 --- a/PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/data/README.md +++ /dev/null @@ -1,43 +0,0 @@ -# Usage - -## Step1: Download datasets - -Contains MNIST, CIFAR10&CIFAR100, TinyImageNet_200, MiniImageNet_1K, ImageNet_1K, Caltech101&Caltech256 and more etc. - -- [Google Driver](https://drive.google.com/drive/folders/1f-NSpZc07Qlzhgi6EbBEI1wTkN1MxPbQ?usp=sharing) -- [Baidu Driver](https://pan.baidu.com/s/1arNM38vhDT7p4jKeD4sqwA?pwd=llot) - -## Step2: Prepare the dataset in the following format - -```text -# Dataset struct -- ImageNet_1K - - ILSVRC2012_img_train - - ILSVRC2012_img_train.tar - - ILSVRC2012_img_val - - ILSVRC2012_img_val.tar - - valprep.sh -``` - -## Step3: Preprocess the dataset - -```bash -cd /scripts -bash preprocess_imagenet.sh -``` - -## Step4: Check that the final dataset directory schema is completely correct - -```text -# Train dataset -- ImageNet_1K - - ILSVRC2012_img_train - - n01440764 - - n01440764_18.JPEG - - ... - - ILSVRC2012_img_val - - n01440764 - - ILSVRC2012_val_00000293.JPEG - - ... -``` - diff --git a/PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/dataset.py b/PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/dataset.py deleted file mode 100644 index 19cb0bf7c..000000000 --- a/PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/dataset.py +++ /dev/null @@ -1,230 +0,0 @@ -# Copyright 2022 Dakewe Biotech Corporation. All Rights Reserved. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================== -import queue -import sys -import threading -from glob import glob - -import cv2 -import torch -from PIL import Image -from torch.utils.data import Dataset, DataLoader -from torchvision import transforms -from torchvision.datasets.folder import find_classes -from torchvision.transforms import TrivialAugmentWide - -import imgproc - -__all__ = [ - "ImageDataset", - "PrefetchGenerator", "PrefetchDataLoader", "CPUPrefetcher", "CUDAPrefetcher", -] - -# Image formats supported by the image processing library -IMG_EXTENSIONS = ("jpg", "jpeg", "png", "ppm", "bmp", "pgm", "tif", "tiff", "webp") - -# The delimiter is not the same between different platforms -if sys.platform == "win32": - delimiter = "\\" -else: - delimiter = "/" - - -class ImageDataset(Dataset): - """Define training/valid dataset loading methods. - - Args: - image_dir (str): Train/Valid dataset address. - image_size (int): Image size. - mode (str): Data set loading method, the training data set is for data enhancement, - and the verification data set is not for data enhancement. 
- """ - - def __init__(self, image_dir: str, image_size: int, mean: list, std: list, mode: str) -> None: - super(ImageDataset, self).__init__() - # Iterate over all image paths - self.image_file_paths = glob(f"{image_dir}/*/*") - # Form image class label pairs by the folder where the image is located - _, self.class_to_idx = find_classes(image_dir) - self.image_size = image_size - self.mode = mode - self.delimiter = delimiter - - if self.mode == "Train": - # Use PyTorch's own data enhancement to enlarge and enhance data - self.pre_transform = transforms.Compose([ - transforms.RandomResizedCrop(self.image_size), - TrivialAugmentWide(), - transforms.RandomRotation([0, 270]), - transforms.RandomHorizontalFlip(0.5), - transforms.RandomVerticalFlip(0.5), - ]) - elif self.mode == "Valid" or self.mode == "Test": - # Use PyTorch's own data enhancement to enlarge and enhance data - self.pre_transform = transforms.Compose([ - transforms.Resize(256), - transforms.CenterCrop([self.image_size, self.image_size]), - ]) - else: - raise "Unsupported data read type. Please use `Train` or `Valid` or `Test`" - - self.post_transform = transforms.Compose([ - transforms.ConvertImageDtype(torch.float), - transforms.Normalize(mean, std) - ]) - - def __getitem__(self, batch_index: int) -> [torch.Tensor, int]: - image_dir, image_name = self.image_file_paths[batch_index].split(self.delimiter)[-2:] - # Read a batch of image data - if image_name.split(".")[-1].lower() in IMG_EXTENSIONS: - image = cv2.imread(self.image_file_paths[batch_index]) - target = self.class_to_idx[image_dir] - else: - raise ValueError(f"Unsupported image extensions, Only support `{IMG_EXTENSIONS}`, " - "please check the image file extensions.") - - # BGR to RGB - image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB) - - # OpenCV convert PIL - image = Image.fromarray(image) - - # Data preprocess - image = self.pre_transform(image) - - # Convert image data into Tensor stream format (PyTorch). 
- # Note: The range of input and output is between [0, 1] - tensor = imgproc.image_to_tensor(image, False, False) - - # Data postprocess - tensor = self.post_transform(tensor) - - return {"image": tensor, "target": target} - - def __len__(self) -> int: - return len(self.image_file_paths) - - -class PrefetchGenerator(threading.Thread): - """A fast data prefetch generator. - - Args: - generator: Data generator. - num_data_prefetch_queue (int): How many early data load queues. - """ - - def __init__(self, generator, num_data_prefetch_queue: int) -> None: - threading.Thread.__init__(self) - self.queue = queue.Queue(num_data_prefetch_queue) - self.generator = generator - self.daemon = True - self.start() - - def run(self) -> None: - for item in self.generator: - self.queue.put(item) - self.queue.put(None) - - def __next__(self): - next_item = self.queue.get() - if next_item is None: - raise StopIteration - return next_item - - def __iter__(self): - return self - - -class PrefetchDataLoader(DataLoader): - """A fast data prefetch dataloader. - - Args: - num_data_prefetch_queue (int): How many early data load queues. - kwargs (dict): Other extended parameters. - """ - - def __init__(self, num_data_prefetch_queue: int, **kwargs) -> None: - self.num_data_prefetch_queue = num_data_prefetch_queue - super(PrefetchDataLoader, self).__init__(**kwargs) - - def __iter__(self): - return PrefetchGenerator(super().__iter__(), self.num_data_prefetch_queue) - - -class CPUPrefetcher: - """Use the CPU side to accelerate data reading. - - Args: - dataloader (DataLoader): Data loader. Combines a dataset and a sampler, - and provides an iterable over the given dataset. 
- """ - - def __init__(self, dataloader) -> None: - self.original_dataloader = dataloader - self.data = iter(dataloader) - - def next(self): - try: - return next(self.data) - except StopIteration: - return None - - def reset(self): - self.data = iter(self.original_dataloader) - - def __len__(self) -> int: - return len(self.original_dataloader) - - -class CUDAPrefetcher: - """Use the CUDA side to accelerate data reading. - - Args: - dataloader (DataLoader): Data loader. Combines a dataset and a sampler, and provides an iterable over the given dataset. - device (torch.device): Specify running device. - """ - - def __init__(self, dataloader, device: torch.device): - self.batch_data = None - self.original_dataloader = dataloader - self.device = device - - self.data = iter(dataloader) - self.stream = torch.cuda.Stream() - self.preload() - - def preload(self): - try: - self.batch_data = next(self.data) - except StopIteration: - self.batch_data = None - return None - - with torch.cuda.stream(self.stream): - for k, v in self.batch_data.items(): - if torch.is_tensor(v): - self.batch_data[k] = self.batch_data[k].to(self.device, non_blocking=True) - - def next(self): - torch.cuda.current_stream().wait_stream(self.stream) - batch_data = self.batch_data - self.preload() - return batch_data - - def reset(self): - self.data = iter(self.original_dataloader) - self.preload() - - def __len__(self) -> int: - return len(self.original_dataloader) diff --git a/PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/figure/n01440764_36.JPEG b/PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/figure/n01440764_36.JPEG deleted file mode 100644 index a04ee688e532213638cd21e95ed8af5bfc2505dc..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 35135 zcmeFZWmp``7AQKnyE`Eef(Lg=@Zc^%0>dD|XK)Wmkl+wJ!Ciwp1SdEQt_kig$zX3t z&OYbtd+(F|-gm$EC)Hn7*OFRm)#_@i)m?Wpci#Z`&*c>500;;O00sC4xZ9_8Qnz+= 
zafDbqI??lTJ^=_lQ&d4_0^BG3U*&%l_cWyaUkLpB)&KC~Z4V}#_&Xu|vgiC4pa9^qw1iXa z;3ew6TK&dkCa3%x!vm)v01V;fUp)N3FdYBE{F3(!wbKz`YlL`-k$rhNK8De)s1d1OO7?xeuN(0p8*M<)@J4XjAIQ%OqgDhSC8-C{EX#X$xml>K?)_>9OO+$s)*#JQh zT{~q6976b;9+Y1+IGPZi(S8AnGXEtT%?j-3>hw1hsw3FO%EkfY;A#(VgDtCdFJ0{y z4i4j(xw<&YgB(C$GZ&Br90AbwbOQa08S^*uJ?5UGXm3Ri-~I3IKawlhPFq%A8?NfV zod0)>3d9QZHw>zoor|`a)!%TqK#-lCHps(85u%{2qHONyXm>9V{V(+2m@%v!!Cp`8 zY^?tB6yER7EBu9qw?wl5S(>@pxxlL!ZXmGB|AJljFZAEpvCOTc9qk;!|K(hQ-}0Zy z|C<1B0#J2yxc4=Ni=&g8s|y75*TA@^VcNk5(*K0THg|M^52*hMj|D%(*8d~>uQQO~ zUKsou`THCr{sr9+++WulWbAvGwEZ52=Ky$q39rKU(eNs1E`S_>aQ9zVTEu@}X#vj1nBVY1Rx54P>EwQE1o@MB++a)=j8(jj!C02#cCaW zKL$ehNwX^7Jd%miwvg?}b`c;O*I5CbRK4wlx~`teF@MN;R~_Fqv38bQ(>=LATtF-THsSx=0_Ojia5o3Q zM*5|b03Zp-Ob-2EOJ_qbQiq!Oq+!@{bs&wrXKtf=rJ8r^LXh|*#z zVaPD@PjzjR_HQV_;S=}YsCBkzMBCWDu^MTu@il7728r~~!xEHQf}?xiNW`_BJ)ee! zChwS=2B~d7_G~DUE5x_wddaEbxMt! z?l?J8`om*tVVz1oDgVMC^C% zoWGO}_z`NL=7Q+mF}G7~u?Upo7clyU{yu5*_(saNU&vI;uW}RdIK+9Ex{VQLOVH~W}IMd$+>7ET087hrU3%!%LIQGk3`z~BraHY z#b%r9>ri6fJ5pHh;1^vx{9Th<+KTG4wls4+mI76>~lkU2Vi39lQDojEDoOF;b;?l z__%st$#lC`hL1GJ`)0)Z=26vD@y{BEFluzS4ycw&RPHy_-i=QOi&eKwJChXc2?@Zt zdBdsCvmpNWGt{eyQ98qrrNbdm`cR8O-jsosvDticbpMQWc;tfvc3?~Ep{ydmmiG!XP3(y0Mh`NO5|YN@@GdU}^8|1aK1ef~W2dkmG+XU+@`EW1y(W zA)6W61da2wLwDb1x2IZ9H9kwN(eLPU0M)##+*J`UV#ysUji5bkxDbc?=N3HpemtUA z9YG{QPo^~{H7>$b!P1QWphTojNUb_z$WF+!T(HNFOK(@TJs+cXR(55Y50MqwJJr55 z{fEfCBQSR0*Mx1((Y2T7SUg_7j^`|yfsG%5)-S0T62C6Z-2o)MHoS<>s4AW6f@Urg z+qhIlB<=vto^;Zd%WMAAL@TnfUO1_8%mL`TphKtPCX$GzLX$Bw7=HkOXnEY~$Q$tttj@cdb#u%5boc;&1>edoY%o9qMo+Bzo!@lQF5xG=hpAF4hDj)gT@ zv7T&`(i{L{ef))-uil3iz6)~OErc{67~tB>yAUc!WD5n$WiMX!$2Z`do3J5ATci)A z51F~2`+of>U#N!nMvWIUA5tqkU4N^t|C2@{A2yWHN!hWR-bZLw4ZXVXpU|xx6iU~| zBL4~#scHLEw!A4>y;)#>^Jyu&!0p?`fGq=4`jl!Yn}X-{b4a7F)@$DYeIWGFE5=TR zGq}TU50?!+eAT8PJ39|*X4)BWrz9lXl~VWX)JATd^}EaJ_5LS74{;; zz@T2Iq2oO#1w#g#O^E}Jt=ts>**mzVXZP6pmb(qrSZLp*rFc_Csq707so>ZqH{>jGdArvnKyB%^D_YCPma8Uj zovqe4)*3eIL=3-TJEZDkIG#G3?N-6hX0EgJp$xBNJf}zP`nE4yk)-u;55`~uO{XNh 
z;b}EcSvln040qGlD&M}Z!uT+6@_Ln&q!3!KP2CHDC3gUnkRF3O0D3wd7Fj;DGDE@D z!ot)7>u{pp0cc#C;tx*WlpJS2NSydMfJ1_`UDH0;02AJi_6z8bS(jdbdbk6R15b4w8U5Z4qnt8!MH2+;Bvjl~=&89_jKw z+cIx4%nVWnm1d23w@3qg{XOvrB$Jk3+4;&$?VT2U@p?_`KX7U6mm0e@yP0;E& z6_pMNRfGUKQ&%)g@%Y^G5X`Mc-;yKQ?)@Q%-I?>;zr7}dY}xJ5;i9ruVq~mjfvcpx zT4bjfI2&ebIOaKdfa44?bp>u!PKloS1-i(r7NDOI6<+(>7ADK9 z+QFB;@QXNXg1~RXB`~}S6XAO0tRl98h6s*Xd-YB3d_6C}>=sEq$I$YV z_L@jsY|c<@=Y_fOTOiFD@n_TR2^Pz6YP`i;h1=)PF4#h8kR(bc=2e2STZ;}75r6g`5eX8Q|EJ$d{&scUp2g zavFvZf#dK!(}o8-0{TVdhKwZ(h40iLxg~05w~dQ;8Yrk#V`IihMn*(w54`Cob){4q za%-6_c}Y(=G9KvvN%W)9P|fYZWZHC|?GMZn1h1J?1Jc>)<=#k6^g{GnsM z`pqNw4dY>l=#!sqcjG<6IxpOJ<6|OwYI+~a4J@e68H_aC0lr=qT24bF9NhTjUWmR~ zpEr^SbFa5wJXqdWGkxaf%HgIpq|Ki>vHE`Cs~s#H#II|hqgv;_-L`8hze~%ZAAgfb z^E#OH{i`NTxo^|@xBlX3<$33t^fTv_d#~%AKKC(YXkNARTugnNw!glqHGl~?+&V7R z*wv=8f*OQ|s^lJ6Cd{ELgZkrBvV`8F)QLDN#v zfZi`MEF6Zoue+yy2hg~1=nz2nV4e`Y14!#T+4`dS;_c|(0XAFspWOyq84i^$RZ-3x zqP#?OLzI417=8h#R|)Fx+cWay+E5tOB@@p1EN}j zq*{^ z_7|G=j2AT*=6H9C7JRJQXsN%*23)mV<9SQU`SqOIQ;-hT2vpyYu;qpc%BI zvSYU7i{nN*Q}H`n+_&v(YOZ0Coid|PRAC0Jw%9N0z$xp3+EUKyARe<}TuBA^<5}9F zRo)~*#3zcNm#7(yKNp{V7>Bh_ylE6=_j2fcIR4O#Bcdg;by{_C?-{%I4u>=S8&2j# zx=@YE<^(&tYN@fIC&rSgma}T%KKkH6<@d9noozXTszoY&+jBy0r#IT%H<@bUuLio` zyxII&cEP*VqMNl_TrV9q_mr|J1pbIQt2>Z(#RhT>6AZDBm7>FWQTNkiIWJw_Qt%F1 zY{IPWfc2!Ja`{58$-5n!T>xikXWrJ`YfArclhw;1TV<0Eh*~{uJ6|vUt)+{*btuy9 zrgnb+TymTR{@Bv;M7BM=Smm>dw8O@g{>@-Hq$S?VKku2ibUu}FoKDjxho1Tu8grWo z-c-uNYK2-EgfD#PDv&A*;GZwib#rHac;fUGOtAwE2Pl1W%g*(8ktp}e z`5SAxO{XO)EYIehIddsQ?QqF4 zZ)07IlAt}!Qzte$$Ig(@*`j0(+;iXFuVNa$1B{0r7ZXo7Z}&Qg2YqIrKLDms?E+)! 
zCeD*>P3Fta?QX$s8!bDna%?Dm3GOX>mrtg#V~>X-UbjU|diig&KNqUB6vRiC4%DxnJfRU_?jhvi z;bQVQ;3?(ayAKWVWUyEO^J~Tr=MxJb#fs^x#yFg2=#yby@Y7k?a&_->&FZO%48`q8 z4bjRN*rG&x8(=XL&Ur}**J2u!xTmJFQ97h$I4s3IryXld48m-P{oIt{vnf&{ZgmdpUcC7 z&|1jpEDH(xUdTPo=fkU%olJ0W?!K8uIzDuywT`$Sz9{)EtXT0N; z2C;VJouh?8O(#ajJH|#)s?8}2G+GBfFi(k*aXYIYYMb0%!RiqE>^)aXRjnJta$ukL z+}L(&zV1zv`~2Zzo7E4IJAhpE5i9}nBGE(<;m~rVS_)$4U42eD*VgInCNV8~rgI1A zphrL)uB{6zbkOroVuO#Q9c$;S2POO&dU)(-sn_#tFn{Yi0CokK-POMA$IOD9zra{) z3*Oud?>aK?L)$xm!mgxT%@dItvmf=B&C{;to!jZ&M$k6@lFO+n_^ZXwt2+Q>;)bCx zQO@x9hbAOjlSN#bOhoXuXzU;)47d7 zV`|_m+$)T;GDwN_?B#o19c3zUNTs_p)c@06cRKQr3Q7q zklenZ8+W_|D8l$~Q?^L#!0QQG_AlU1ERKa*1~3BAnI8VuI{>iGLE25nle|W}6lEWh zG;opbqLem|b^EUR%4B5CUm#_}-q+2trUZgf*=VygRTE*GGHTjT0;Ge#V(zfUub z{7Sx`qs@j+arAW!dZO0x)tTW=mO}PGX*H3z5Gedv@hXg}$VH-f3_=w=u?kAxMTLcH6%p*59n7qDk#`FuR-wZrWe+!yi=6{1sah?*Q3~ z>G~pqdfNsJUUGiDT=K32% z-|foymdGo^g>PP7Ei=x2JB56Dct2wG(rBvQ?{XX*WI|F?A6y?J(JNp?4gGmt(*&(! z7?j%S{CFAVY}?L=H&oIzbGV}$xkLBi%jRU$gWx#@97c?Deg2unQdiUSp{=v0%k6iw zzh}1}{F=+vxu1i+pV!3$JOx+)9N~c@5P%+_4zPyTUEmq=s|F8<0f_!@IOqX_08V(I z2`=!rhToAXv|phgxQ@H;WKdZT8%qE{MFqeLXF~^|ArJzPel>ubfItaA`Beu1%n_*m zU4Mn}_+LEm5EBCTuRzoN%sd!@=il|9V4L4s;P{X5d><@Qfam*;{o4PUai{-Jyi*V! 
z@8o&%gkOaFiO3T{dTu@u9zGGi`ye3zDFflRzPmdBAOrE=a{Drn{$2koMuv)j^e@}s zef^L92&Dh9AA$D2?MFoTO9sLt#DB|%#}0m{zuEjJmH|1Vn**_Z|49J@epVJ)j$MZzejU1)Bj>|u@z_5S5c>Tadfn^wQ-^6 z;S}QJ;o#+Er?&)~*@N62!M60=oczpikzfl;5ltDne+h&`;>`c*n7g|>r#mmFBiM?I zTUc0_>j@7R4-W^Ng9GB};9};%;Q)CE*Z0d18~uMb`pyNwskq7<}^Pkc6$6f!(1OJHm&*=K&u7Bi#f5iM}bp3JHKk~poV*WF_{-1Uz<@Q;}P-;OT${LWvqLLdkDRFFG-&Ii5_f;0dL@jl&`NbrP=ihQ5YQBhG) zG0-tFG0-tEFtPElF|ly5FfeckaB%VP@d@xTu?dL?@rmGd{Cgn?_bri-(cum8u`saU z*ngOQFNTl+=tDw)uO9GwF@!DvHT=_10MdUJK>z^YD~3dao7pA_^)38k{HrHwgg=0Tl@a1@&GHJi)(_BqVx> zfI`f`!%Of?`T^rpO)XRsZVhuYXE5Y-ARjPxOeQ8X>*)C8?~4(nwz!aZrD<=P2nVSO z4G_Kv^bFHqO+vgzlQi%wYjqd}Rw{IVG|p^s(lqj(nPhGWrhiX0Zo-X3Kvy5OoXmV| zld3tSnt^2|S9^d^Ft!l?n6W=;78?>YgF%#v03H(5UZ$x@7eivRc=sac`WZM|La5b96>L|K02ws56!LN)S_r_I&qNS`QlHaknjz` ze4=umpO<9pGKnYKrRQ3SqDp&KVB#g(iPG;9a~zj*DYuz7+)S;Q zzhd?NBvYCR9rFfsRWMA8xM1dsPKRM28%~6%s$;!NkaSn0PelTW!=~t5Tr-LodvyIY z3}RT8mVh~ib2`E-=hV~nVZu1AV{5|h{KkssQ{o-vibu9&43ga1!5V24KHcp z?LaSo8lY0a_Xa#$ITwnEOC6Qo?n|uh=*LkkUf&wr%h-PSkLT3&cvV0(ms#-cjo;))?Ob1~Pap_7ik^7x+P@+7z0f-$UrXkfI5gpD?6mufIQe zLERL!*6ZVEF$Gd_hXu$Nu-=9bH?q?GP)?5d8EetpsmdcIjOh@;ki|l}(nh~si(G-_ zY@dvy`w{O|XYhj!Iub!KdH%{lEk1c)R?$~gwR~8F;acUWc16b0J z@mMMRr}0J6m1y6$+%JpI<5H(?40?sdKULs}oW%Quo$R{1={bvXf1P;gXh`jMOs6`E zHk%%@oFVo+?_;MVf!wHWGG_ZA_FWtwdwIm12RQ7ctcE+9gOSl?#6`wJp1#f*h-}vP3gfVmYM`S?*PZ$9 ziIm{`2xs(^&xUsY;=0wSrrf5#Hd(IJK--&jHNLlvny zgo9v~3dPKb)c}RJ?Qu^Z9Go*eN()m)S;`!n5KW;`Xm|=(jlLeX?oKdz^UxXG4fR+U z7@$*)2&sTs6!{?ZYbO}oj7uCIO>(fpT??~O+aS`g(V`|e*u+Q2J2zH}Ms?;2O=R3Y z)63u=j^s(<_~{66fXp_F;3yhy>;*25J?*0Vjg6!TlQBag>KcWH4{4_a05lHA9>7so3 z26=aQuV9n`b}I47&zODG%lTYljDHJ?Y4Bu+EFj*Jge?|V;;85O8)oWvIqlTW-201= z?HKE-t43gS0_|kK{TNRi1I?IxGU=zGqny(yK);nkfPzh<&?i5pBA&HMrT&aXh|>W( zI;ZA7Bv+U;=L^(IT=WA|NJbIwLhEUM^D}B!x6sw}kha8ij)h13xU|i?og@L4k=+WZ z%qr2{fiJOjs?f;5^G|RC8zqU(5qpMR@@JPrTqdvwe{`8-aUE4ZKp+SVwRt9 zby(6Dzj_%0Q>}7yl;B<55oyi`3aYTqi_NqTUwTsD<+zr+$xx-~+g!bs44qb{P7aCFw#`D;Z+k0^K3CiIw~wH=D6>=N$rD=&$my&y*q$AuY+1i 
z=SK$+OPb=^o}E&*T7Gn6Ix&f{i+1HP#&8hzF(YKV?fs{{MVr*Dfvg2*lP=bYS_^{j zr;lAvuU$`Vpu|bF>2iTa(H6qDD>;*_T4@(X7;)U3$M46QqB_g^8fUYO0w+a)B(E<_ zR}%>t3C7rM!apyU$sA5TZB4yi^v|n#ndd2L4N}v>_U&bXN_?q1e~_?Pyv#SlDI`o- zJ7zB4k(AX`S+v)N08M(7J;BRV7?iE7Rp|TvTN|-e6IeUVMWK$<85AiS40T9B(w_AB zcKmcZqm6B^PVv3$P$6|-Z&Cwdg3O!7#I&s8+l)u1d03FQpJz6){jrDHi2`!BeV}DD zK6UeX$%Zikdc>B7?A0nC_0qzaD!IH##$N`nLTM7^bzv{>07*Os*LJI9c)4zRZHgI! z^WTe7sj^ruX|`;xFUO`TRu8E(UGND_-yfch z>r8%?OD_m3Sj)?~3?TK?!{{p%JJbuN!+*F|CbSxnua-yuuuLzs?5k6hSCIyhfsb)JMGooWbIenP|k;% zfR_smOD3|LuObDqwlU;v#6q3B!F0N!+vF5i>Cq`=Qq%)}g{myrnzMq~ik0j+?xB=+ zvjWIDWe9hGmVB-iBCQ#@UG6A{=?n$R7v;RNt{q?7J!o7+%vqZ34+7s+?#a$I5?8h} zJ4F$}-ta$no!FOb?|4m~lHV0R+edTKr<{raztP#gfBD?>@vgX4;Isa=5@?mZNH;j} zToYuLjwTD{EA*pT=TT>oX$3WnLCTqBdS5+#SyN#8v7?soHP210Gqy15BO^2!!vnOPA$o{`~KqENee+}`Y& zQm9E2^Vo~aanQ>{`Vj^hDb=30BtD^v-ahBWwXtD#A5|wdcoNErhT+Qzpok&`*Z$=D zs`ID^`-E1Z7hARhRyJ?Zwg`0=$|(^UF4DB`NlWIDf>@BsnIotktIVI`K)P=q|exllG$4_eaRi&4aNy*!7jaFMCOuO1&~>({pl(R#T#uu3XPjeO|IkKJbx(W<%fWpu}oa8en8)rFTshOxt<$eT{?#M?84OO24oz-vpN z>hcHA@{ZxZm^+6Vgd_iyJP1?=r`Y-jveM=Ms0lM}VlAR$x@9!x4jrZ$vmLDEeS9|K z++9b#4?=ESU7}e}P+V*5d$H=Pnz56gBSAZE-%FK__}No-!{QiRVV{Mqt>MP5Ex$qA zrlQ;+`WCc?IKgs2^EAojrI)Bm)K121`YwG-e*x0smTBruqj=jn!)H>LZK7*&b@80D z5b(Uivs%kt8R;>ib%EoPFG|-guI#X&^Cl;);h+I!am2K%$kRwjOpo*d<)d#8Vo4c| zUkrRN!8BO92BQ}f)={i5_P;I4Z?$BL?d?wW^l(G|#NdSFsD9njMq^vqX{m^+qxrOQ zxs@6-Kj3IF7NZvU8X&S8B+U5;<4St#6Ze!|g@sSd8ynvz=L+)YML7oS-IuE35@}MF z;8SiRVcWU4Wehi>O524@~NZtE+6CWCgGl_Yiq1t7oOQpdxw_G)HW+1u&jv14?bFdPKygQ zBGRfo)FJhXWrzBxd}BWA4@VzARvr_z_4_J+-Z!B-YJEf3TZKo?z%XcO*xTp6sG;Ne zwuLOowg?m?{yea@ImJ1s*yv?}O8h%YD)!oMF$f8dWQ2a&rKiOdE;Wdnkp|N_=_ETp zbz>3L6H6!Ssv1HfG>Vb8Sakg``7lQ2GKGIPkg9a>s3e_1sCh0aNc=~jXMJJo{Mr1- z_YA%URA=A;Mwvo9_g59ZBT@$0gjdoDz)QE=wxtPOG;8s}%)|iKIOk)+OLO2+uU|S& z?T_8@@VrmYGv<|VdlPXwx^faw5|obAc;@Z(@w1&3!f-~W=sh4#O8D$xF5Sh@woYB} zMO;&s&hvu#iJ6MXw651(3tV~yKDDq%{)=Pu2LxF?L4uRD+Kn`PTcg+FB2+#fsrn+0 z_lMR^mreF-#uo@dmtSEkx@Hbe?Ch2qxFp7Iscl2PzkV=2^a>ORHH)kJibcD5ZTYjwmK7P%yxQ)D_mQZYOM1 
zak9?!CD^YK`OHS6kWL+oR$|tY&7>65^_36H?S#4e&Winj2Nv1IC{s%a`7 z7$7ljg;nHWq()NI7qzS(O`5mWW8@O-Q<364iqb9AV^yxhTfFcHKmLM2xeGOS7C2FP z*v#w=C4V#_$fi78xdoxAW65N#(@Z2!k6=jzSR>Fi{8%U}eNSiAvfMPVaWEOLh0CLl z@x7dYs>La*E7!WRplNpfYg6C%!gMz7ld|P3aKoSUbfo-@X zLw6?YN7TRuL`VzvPY}t!jh}xNP(Qck*)a&mdIi|ttDiL6PUP=6eUcXPQ|{?9fp}+y zz(V8C2h2l-Y)mQs$<}WsUc`>YqkJVvjV)qVc=Rwe?qQm~2uf3cb?a-s^;2SRCEP_i zRGF+WK4jYuAb&BtLBh{z{^=!BeI;=r+&zv9fo3{p`d`IwTQ#eL2c(;`7>BJowYkj$@m)aeVt@1ycE?3L05?F1? z!-%_m+@{YRMrBSS)t)PJW_1Txo0rx-`}&o<+@yAAvd{Ubf!4YA(|TBSGA;4vfeQ77 z#TQq_JH|8yESnRgP0_M>^MdhnJoK8gYrRpLqzj(C0lmauja?0QOano6Vt-J5E`j7zLwDGa^9Oc$`+zy$7kd9hqdW!h&w*6B1%HMf75M2Bth`u9>4S136?% zztBhTM)6X%5a{zzhhj(1Zv^>CSC9?l?SP(#4!nHiBb8Yv{fU%XpM-_T$OqU1VC~l}Nh6Mb&(Xbt9bNd`i&s@9C(*e}{6x!TOvAnJM#S@_!a1wt zxHvK%{l5N1t|bs$JQ+p8va;{lscG%@RV35C@xo=FqZ;FtN?#&vzW#Y<(@vpD0U4GJ z7@VBQ!?N*=vHkp%X(yog{A3EdLJYEft(xtonncGOEr0b)pWlJPNc#?8gDmn|hkVOE zCB%|B%Jls~aH*SKK9ry=F~p@HYDR6gAvs``S5p3vYA#|7x*x&fVrEa~omxJh4)@YV2zi>5U#n|KpxpsZ8XdO=a-oUEU)!j_an)SnKzs+ zY-K%MtSeS12A)u=H9-Z0sdMz3>EcxjwCPRIo);)bZ`CuNcFD#vt7@Kgxt%`km2X+| zi$Zvn!k3SwTDO#yxHW-CXM{jTi-$)Yo`vXn>a`zy=$OkCmrgz;aM;2__;Z?{G@MKA zm>1KG0Gu89(vw+s-i2<<(BX0T-1hAuG+MsUSbaxYM5leYfS8bw5bS)C%EXP2Z%>Re zbomvFVJgaQlc~&8)UVSVx{^yWi^~sVL{YH_t&O%IsuXVX1xU%r5{>wavxyndF@BN* zKHpdyet^T2bZQvEocaJh8=;b99>f}YB{8|3D{;{GlZzo~vOWe?yFErbcqJ+6*>SVh zC+={7xq{qGQn4kdHmZG9g@>0mmJ{>c#3IbHMJ=MU zYks|4)%CXq%B3EBSBb$jMT_5YRZ#8#giNX*)xbFdnB;+690KQDQYYuu1|GhWU2F#Q z&!XG}h+*P|FCw1CAYwCPkv}o<{R#e{wD_0 zXe^4jb~%r`GX|xMwTK{jyz?`rRxc8K(a74$Ds2azmovn-sW2%D_~bK5B)@r}@R0E* z5qaKO-s)f^VZc&w6bHta2kdA^YW&)gHzLf@MFrk$^bVmnfASpXEqnQ*b&r&ahB`LRqul7i8(D-2oDpPREP7lYJF2ZWi6OOLr!O ziGe|vAx_!NM*)2T$jyyKs}=ZmVQsgII%rw?7Ozhzw7FQ{adg~R^$-WkgpmbbC_&e+ zaU>7xbakD@6zPjqKH=_^YHbPPF>O_{DhsSS076=k50;5{N6*~stWD6*&oR*mGUk26 zR^PQb8CEN}h(PQcl;UaA`aa`T7oG;>>!{SJoO^D15|vDU*Zf4N08Nr#34TqLB0j0) zr4%)`2qj<-1u58{*o_n<*ql1FxR`h! 
z_vwWO4ZcRNW4dP2e1$V-7q6AT^_oX4_$~40lbPV@NxnhtS$^yv#r6IV@K)o^TP!Cs zJBkO<0=1r<$_>=VCDRVu$NCfpUHRmeq8Fw~$1cYmxqZc0`5|In9OK-fs{P-0gI>`rcBj;{msOH0W=4&}2;nphy{1(x8jU89L zD(c8o(z3XtG!NAj`(izn*bPwJ%U)$yGAX=2lBbZ33lQ&w4!D{|t!AwR@@?_aXDSTo zG3ynLSH*B|eqxtV84L+&jypEiVn4Czv7Sc;B8#R-;&47LRf%=Qh)sHAf~tZIq##q7 z6lWA+Kl4H%^?BzNWm0N1x+#lC_=qE{L#6tyXC7;6e!B=)itWQ%vC?xK!CVr-Bs-0@ zLEo4Sci;hhK6KHm?CbJnqGMUytB=4K13#Wjx)9P4%zc5yGP&$&w)28+`f;(RZej?k z8Sq6*dqvKVXB;rVx1y~0OlV2e2k4AgD zJhySq*ayY>bhr7^fhtT*c7IFJpAZ(YLV>{De)=O zMvIM1M(8BZt=ExLDG*WW8GQQ5X8y(nLuYGQozRroj!Gp`Mfq6clrVB@&m5HOrS~0r zI+|+{gC*;`KI+k+ma6g|f4jK>nSqHqd@#7W39=OmMbRojaC>mN&?S3B^fSnJG6-%zwpI=(0v^VF?Sg z2ZoZ4U^yUWjDOjUr&N*hvB;Bp`r1X!9i?R&+*|ZY;PC4v!!!`j`57$xm@t^f&Tu*8 zl|{_s+skI}x_L+Oi}8t3F)uJB(Ub*G#-hezW^b@T?CE$au(SCXRptjKCSv&^tLIi2 zso)L6EEwZV(*Z>)uj7jvgS|8~`daV~kQO-*t(~RyD)PB=6_?w?BN}0R3g!Awwh`x` zhVNf6vsG?hai0y=(jF~p_$uZ(@YKB%#1Cfg?pm^2^Dtu;HL(8Fw{UQFz%qZcF9N=J zOC715$fOoV{)FLy4H~$sa#m1{i^J1Rs?qEfdr*fQGZmS^0U&xO%=CT!K-*^K=aQgf z^{rJ@?XNW^&cm^cE$>x7qX#xmXsayjgLTzuK7*;zP-44y0TJjU1NT%4ehJ!1@9AbEzb{}Oqoy7&wXxF`##jIG0;Bt zNZCn=4%{Cd%GECLW4k!6f!?){fBl7^ET^y}U7VWl;9Hf>?lI!i`Gc~Mno%zhWR1ke z#ceUB?+~)n#!U-r8HT^pu*Vi~i?AaVdTx24&50>hz?l zltI$cc#Pg3SK(tl64j>BBNr361L&Jn4E(71F!=FDN6ERy&x?1Gm1D)Lm|~b8bSdJ# z5<926vVUl#q;#BnxW4#v3wvfX-DA>pO6o_q+sSj*!U!DV}> zvF>`22b3`&->T#Q)2&ouBVOGyQO>JKlaZ98Cx1h=Reyi1Vn^w^ra>fE-l>IGT+(h2 z7yBmaNAi=Ps-DLADdR#qW}IXxEDay{dc1c1oJ*E3ZZ~2Qf-uC=kR&}6uR1=Oe?P=2 z0wEP7n-?^I5L2)OAhVk;J@({4JXg4G;Z)7%mnXX;*UW;>E z7fWQ!Dt>y?DM4}RpNrR0vTe@40*;q$%5G#p^+>3id(izs7&|qd_feGe{#&}oSsUUv zjeN5D)=}!@8b-Wg&NTe+^`M99n3(DuUS&$XM3v<3C0baMZe`6%b)7F)5hQL7<}$W` zG!(O@to^9i&`R4xzp5K)_D&pdF=@jwP9+a}Lep2n>8ADb$*q>r=-gK?_xyp@h(4X9 z>7$%!5fqK+U^ZHylq#&&^vQe^2(~-#`~mxEeqhFDmX;`1D&y^n3@0+!v{+uEz(6&v0GmfA|@Lx8+eJAuGZmKGi=a7M5H& z--xYqtF2ePq0b(n8TgtzU;lwyweh*{tPb7Bqtj+K64o5SqR6ogEyEb{7ohytpLv)D zk`CG3dL=<{S$sUVKa#TQ_?!=*y7^zBOEc=pvjXh^g0_4F8YV7Zr(vexQry4|-O)?iI4L+jP zit7qjHvN()&lU4{6`CoklZ##?Y)19y@DJ^9)Qs3Ne9;!RCqG%l-*D07{N%zP%6OUp 
zl2UZ)8LDT~uDTF6OUn9Aiqppsq#{5c)c1JLrtJv{?{1ZEQDNTgQ5qj~hmmYKe1D|s z5OZ%{pu@7|X_jU|<<782@GD}N2j6jJ3->~d0@~F;*@%0`ac8V_e==Bj)g8K@lQ&o} zwUf}5w|#gr1p;T~L0_Z+Y#UPw2@4#zXwKX$qBM`(B>N!cB!w;>AmMz6=l0LNxx-@l z1mg2j_+j5duPdh_c-Dy0mA_p{hT{Ja?81Fun9}63hk<(a**;1jn+jGP4 z^6m3SZ{}L?T~4~UKZ2B-rJLW7kZ!TYd9)5D$hLm>F{)uzqKtW|iGpD0W$S zW$YvqAu69d?U#QJdnKEv3mt2BNTd}r*zj%cnU`oDn+dkSiGwz7?JX;_y+Q(s4GL1B z+kJc!dFaUAr4TkE-r89A5r1)SKJYu;*sdSa5-xQeBNGq0z^D?D#24u;n!@0cwxN=i z&^WC@VfP6(Q!CTTMa|Hz_x8_EGCm4EcN(@w8FeR3)^wI#=8{PF)=l`y9$pjB`b0i^ zv_F@08Rr7az)Q6(*X`^thA8+ykSr%SufA89^I0Y3H1g&U2WSE_-wR-d zRwis7jn;i^7V#_13sZ~Wj(@eziHCoP8BeLMSf*_$%3icH=oAh9+H9=)0<=xA4zPH* zxT}_Ebf6ar5fyR!y4dvaotynWgSh4sjh)ye&z%4vOT_kQ%wE(XJ?HJ#C`l4VN?%3; zqXL7K!BdK;NL#1KKmly{q*Jsjn*n z;=y!$exo1iQtBU{t|M`(#Vc9o35{fSXfNPM@)?s>sf?;JsLC9j%mI4qCeuf2$K9|~ z#|vv7B@=z<6HxU8TqH<(JO1452(FP>_A2;RgAIsicKbg9lN)U0OvrKi{WQF@<1cYU zqRc2vl{%KrMj|=)j(Dfv-{#-t)>NyLQb7D%)aj-gr}#I0{{V~!on)KFG;FzyA<~u3?PL9}*IJ3(hrS#t}x^Y(_ zp2}*CLZthIKyPxTI$XBIJ4CVHG~cIB%SW^&hD_J05oJd*nDfngl$$(=w)woZObA9& zi3YUX*safwa!NT5VTd4;d;C|=Zn^^XfCi0Xg8u+ID^Tb2+x?-lAPjR~k3i9}7iBs- zfVIvUzd_|y>v62{XT*#-P8xVwC+4QBu&ZR5prS9`>FL%xEcf+|+Jyqe;#CHXP_I;} zG6F(Sr_^K4Nu}XU1$mAdK0~|TYPnwT|4gw$shKK zyssmh700g1rNv$eTs@f#?e>{^q>Z|K)*5BfQ06#4ADMCU9V-+M9hls!C@u_JR^=C? 
z-LaXOAfXQ_OOkJ~4Ae)&Hl&JMm)`E%v&&I))Aq~7rWPT3f-!A=9p@paa(O2hkHPxz z!b_N@4jA&)bKHGhbbP5J>J(i~Lq~{HxUnh;H9X^1f@DI%m@*1eL^0HPoYv6VZX-nO?omGPp~~TgSf9s+(;7!P>g3Hwn5qrq^y?&-+XXyKl1BCcc(w<8I@BB z*}rId3)fYPNQDWH67=fRB*=8`BW2MKKg~7KRm-;ypX0PdrqYZ~--2}L!5ln0STf+K za>WHZcKB_hHTG$cp7C+Yf9R~}7YqhuL#ae|JJ8(NiQqKFL*wvWD;9gpwU!?ie$f<~ zgkNUGp8@FeZFgOvs*&b0VIh*ke{}uCP+#HJjQWU@8OvmDOX8Ct>~DR2+)IfiUNoIABCV zy%)cx`F-sYnslg4fR7J_#D~kh6?XpsE}G36%l`~t80->=H82VH2)St4h>?FSvi$bZZpK>R(8_v^T9E;u4-Xd>UXDGZf?}9{doTI4tXheX z18IyF%`S$-=~cWLdOR^K*OHhyzoVw!uie$zoU1W(hh@@yAF}O`hvOoVM{MN>V7~B@ zD50ugn{Tpwb(MBQYmhy|z8KN$*B&ZeHU{oA!*X0?1PGnjo80m9RL?KNN=q(RJe3J+ zcAa`vT870^u>r<8T%JUE1OpO6psQTOL|-q(eMA~G^*Inxn~*JkS+=c3=(4MH9!&UR z*qoU0-sa_)?e4{KaD_FWO)T-gpGid2?T;4wmo?>hc&W20QR36>_uORjO!E;CNTr5!vZXsY(_V33CU0g$TQsA2Iv7lFN-cz~y-K z<2Cr{R-nSANYrrUPGq-&-_ER?cOA^;Yxu^ zqSYWA8DB{+<+vHCZPUztzwj>T-Vkp2wF@nhX~r>eaxWC^n~KqeZM=5{h|PR`p8LD+ zOMP}k2cO?1j^6Gdp5N;A*F`T7`6KA^gJSI$inbgIJ!<8vT}H4er1R1r=8KH9(<9zN zf+oY$(!F)2vY;!DOyfV>BZ1eGvE86uRf24q<+qGSgk3{kn~I_*O%?`#T6LJ>{{Xau zkZ02B#UK9wFx^TdSmTO8WtoLB&>pYx3Kx2#EZ{HXrRC1 zOA^9r^$zH#R|Sfc^N}`Cw(ty$%b6W~lYdfl-?z_NYRP7r0`3GHv*3hs@%)^Jxu=(#6Qh77IHC(S@hVXF32#D)?J6(Uzo2=U8itOD~we8cA5e%hieRvGqmx? 
zZ>EH-=b1$sw5bl0i~u~@z;Wl}*}8qw`^l=ZJ(QO9IH8)Re=;KR)gjTt(6Uax9xgmO}CjNLQ(?V7}Q>Q0h zm1;#6EDJ#1g42TL5d0&)`~Y{v)jwuGzPiD+ex(`whYnfvZQ&E| zE@F`VH8Z4|ca*s#A0w(dmxd7j9XN}4LeMl^?WkzUhN|*eKIf8 zKU^1OE(8*RNC?qJL_&}yAo3A0b8U@x?(+2(h0i0GWyVh1zpb0T8;wu8y^eA*GNye8R`6^9(>sxOHouKH6;}OV5xWWboFl7Kcgw7q5FlqEh*8Rh-SCU&Z#Y%KG#BpWEL$pH_4faj;9kohr*}LfH;wj-lIl|`kjN;K4Q zR)bc0uH=ZZ=;24HRyl)YJpjowbd^=nPz+Ry=5t*R-NRKJnB+lJ-n3S65x5>5 ziz?o$L1EL4QaO%`Z8k3htVDd*X4S-1>*vu|S{vVRRc`dje$VYQcQ4vlH^*%@?EWL9 zsdPJj1sts+hRs^zrm*ELCb*v$7@%H_PRTLcm0HlJHSTY=J zYV7!VLPRVJI0zSZ>PiTNM5VvOTJcK$nM_|$3o90$7y%|5FNl^gziJ{o{14!tU1_eM z2~^RjN4TT2%?|-68})#hV~UrjwMYC5OvI=SPiVG{Cg_6oW|45ii&u*2Jp3^OXh;Vy z*{ZMYG497?`e?nx#ztdl?kPo;YrRdbTE5Vg70jcSW-JIO@6FtM{a?#gw4%f>(qO!B z*{pzK$3GCn^Te{;+S|<#z8mNa#apb&w&BtSJfV2r7V_-pW=O=Fv6 zmZd9*O;@9_YICTpHyOfT3lI#1HY99SRWeMxl)vGt#S%!FBqh!?TRc1Bu28B`li zyrW5`r)UR6h6(qDm1qEgPZ!?L76aLJ;Q z&&hC`YIz9r~fSEB9RrkZhl zgS0*!-51naT_N~%N`#Dw>8i};LTG@Ppl!N}sEGaqKhH?4=}&x-j5N6rTd7;N-FoJc zUbaF5&^&1is3(CG&G*0&&Y17qn2B$uV4i$F@zhc!a)CkNy}wkD+%~CG>Om=o+noG~ z0@1*!5Em?uZTkq8*R)q?3}o&IpQe^c7kwPZ(nn@9;EDcO-ZCd@eC22QtPcTxwX2|v{HA-Ayd%^O`Xg41LY?se&yLH>I z*I!$KbagtmpFGQr^!@@YTQ;QA&D5%sBT~XXB*HoGd4eUnY}%spHvF|lq2dPk#Ha0> z&tBBKY7Bv5G3GqU2=^oeoFV~FFE5i{KHr~X9z9BagRR{kY7(I_$sDkeA?^y2@18D;7k0 z!!(KZ^VIr-%nQVC0Y?lFeSEb{mo2u`%JG*2$EDWkbqpj^P8@-4!D)ds%alvgH2v?6 z`nAa+MhA_u-9?wARo*xz-)b`61Rq|fzRCXGNShyQt)`h$=``lWh!1n5cD(b%84!|m zK>$_T{B;?!0Xqr0wJp~krP&to%ydM2TkQLY>~HaK)ezm^_jR9Xw{Qe#G76PpEOR+ku3Jti$)ipWTlCBYZ@Za~18Y5miG5ZaGc z#5OVYZ|T-OWa&?-8-QiWsTe++7lyX|SgO^!P)?Li@=fTcgYr*d+jVc{6igEjhLKw7 zyD_C@=aRB5Bxf4Qt75a#i;~PBzKB^mfZF!+yklGG$eFRNMM$V1D2T zn6wRAgLc|3c&O0pRXNNIQk@1SGaDuAfyZr8+_p}J84u&E?t93q>IrfEmvY~f*_O=_ znmx5ilKU|!vLm`LxbGxeY~67HQMWS>Pt*I_ysN{&{{TMHODcL}WtT;{skJJ!sqv)K zY1OA0$^(Jo9U?N>K^J^+2QyJzvcR%GPJvbA`G#U-jRm`cpG$#uQ|_n}9%NZ{;f*p; zx+RfxS3}gkmF1wP=DIyKOVjqnCHIXv1J-@~F`Tp~Z-W#FgN5}sFXuB4NTDlqM+l>23u2xN(AVG(6?}Bg`5PxC^ZX0^s``0tkdxcv!^`vEE*4f 
z#wLO!B%dPwdgnKVT-6=EkNo%@j^KxP(cc`MWW((f_9wgeO;o5}7A+j=)=P{zIFYmF zzf0-4frjL+G`^&Ccm40KxVq|WuaZA$+(yY7V6)~k(uDr(je<7!piUdfqJ>QZ^4B-ptl_PG4#x}g_czGc+z zAu_y%oi?rQOi1Li@wn)hY2+d)@ceiDHC2NdNli*icG@!oom8jk+0&bu%?HUYU8CGyCeHPRId~68ytuk z1R^5&1wNbk=`}Qfv1emVtpua*W=NB)lf&p-ld1~&?f(GpNY*bvHePe{H+8!flNKvL z5}JAOa#K{$F_a>CHiErVy?s4LZDchzO-Tdb#+riNQj_gUl{P#o-?3m;9CA=no|eu` z7$zuhzN#aRJNJ*CtxTz7nCjU3VttqPkd;HJNRKv^8gzPpGMdY=%pWowi8>mn?u*xL zYs*IV4StM`6wEVAPVJ0&}cJU_2f zYu5ogHUu&jV{@PZNJ8hxWR0{11XV7my`+jLnzKvDTRX@wL8Q{^){Sc3tJ~A*;LGjK zhAhq#F*L~(Q2Vi7xh`LlXb&%`<|Om-5nd=(q0z4DwO)hc8>e$yBL}&GA+Z$O4!w@! ztr#zpOax32;jO{L_MqGsrMb>1V;VCvnR(MADefO*+lnXeuieCSD(I(@$m|Z%W9@`4 z3!?D0q*Gl0WNtAQJyrF$*LpC16{Fh#He_hyTNB6oFN*XGkv#DrDJ zrY+Pf)VnFcx-BY~j($9xYYeh&x=t{DMxK1;{e9_=Eqz<2o2I@^I)Dad*Mhb8ezTj=PvBC$Z$Hf9*bb zF8ZR9bfcq(``hFH0MTZ(_`O4$P-fSvOa|YW$rUw;j<#>)0$lc72GeHIQ@BBXABM1+ zh2uVrL%dR;g5&c&6n@0QQsdG5y<(EaC#8eY?H9nzb=^vn2uc6^9ChU?jClHa^$$i zWKf{S#g>Pd^KsNqBQZqlpG^u=-gwP>(vKpCJJ}}=n-*Y^Hlqy8!bp!)A9!VtPvOyH z)I#iKYSfu^#~rUxIVT))WEnLeA^FJn>D#L^i%Wl|H$l89F34^jFEX1bxge~8a+@Mf zneM66Q@5xC1BtcDY{n-=oBBfZi1U~tU>EsAYAyzLfEIg@@`Do7-B%{KXQBJ0!g($$Q_ z^O4>lUJ>|1bW1j&Ek?6)lZ{6{tw&*zFP$<{GGC`e((%*1da|>4D*ivTR(5AwTksa< zNR@kV;=NwF$*FQE(Urytp;na7nkb4}F3W-@!O?b4J@mwJ$4@$7+2AhRy&{(fMwK=k zik)UfM4OD;pi$Tsk+MWiy$Dqc@Kr@!X;*S|gq}$gr)gcVDKl(VrPVFz=u##$M+l{d zH;^jQrscZ#817iFBB!!iPU#2})Ro37a+b=^L1EiV+3EAnjy0%Fp)>2_3U zRHJxkHN`<;qGU!xLPmm!Xoay7B9dp8`!AN8StNJn+Fg12!jJIPi8loZPj`%hXr2L=-3rn z?y(N&uT|+OReD^YVwn|}V-rxBK_?qU-A83q$94FjZ><=WN-R>})FW12;U&XswR1af z+;H1$&OE5GXSM(`1Q#-IVNTWU*?UXX=cb^FU9Y6y^D8GOKy&QPPnTfQYn4dViB3hC z3Ab2i3j%3bkmwRF-J~c;N-4J9UY*r7)oxw!3^F~@`2PS+pX_u00BUF@sevqOf3Dy6 zDpljUuKR+F`dsTmrPP>h*o;%+HfF?t*cDApHz8exdq{k>^+bwP>0{){RY$S-c8lBj zliRC`)eai*?um3?QY6*}g3F|739xLQRc*vx)l@|2tMOfBZwj4>w$)!}fz|z-+S6)F z?Q4zO*Z%;BgqoEmp>JCpHH{c2FzhjUbe?ho5ULuhNGK<|DRfo%>m8(Hwz+^mXKKYd z;ixZz2~@h{EsJo;osJ?Z8mR+-s*0dwpi8$=UGefAeEz$U`3<^eYYnb2(Wo?9gh}*R za%ocQSq(-sSsTV`Kso~Iv$&xsmgq^kCM)Ef6G^B^IXVm~)?^nVz^Sk*uT%{fZIW{o 
zLUg+uCw3yhe8{#_HS<){H!vY0=VG-)yPmRHb~JY(JCo$vtBsLu=88y!U(an84t}y3 z+f=!enOdk+6h-N*GZnG~*$Cic9k(12j`cMA torch.Tensor: - """Convert the image data type to the Tensor (NCWH) data type supported by PyTorch - - Args: - image (np.ndarray): The image data read by ``OpenCV.imread``, the data range is [0,255] or [0, 1] - range_norm (bool): Scale [0, 1] data to between [-1, 1] - half (bool): Whether to convert torch.float32 similarly to torch.half type - - Returns: - tensor (torch.Tensor): Data types supported by PyTorch - - Examples: - >>> example_image = cv2.imread("example_image.bmp") - >>> example_tensor = image_to_tensor(example_image, False, False) - - """ - # Convert image data type to Tensor data type - tensor = F_vision.to_tensor(image) - - # Scale the image data from [0, 1] to [-1, 1] - if range_norm: - tensor = tensor.mul(2.0).sub(1.0) - - # Convert torch.float32 image data type to torch.half image data type - if half: - tensor = tensor.half() - - return tensor - - -def tensor_to_image(tensor: torch.Tensor, range_norm: bool, half: bool) -> Any: - """Convert the Tensor(NCWH) data type supported by PyTorch to the np.ndarray(WHC) image data type - - Args: - tensor (torch.Tensor): Data types supported by PyTorch (NCHW), the data range is [0, 1] - range_norm (bool): Scale [-1, 1] data to between [0, 1] - half (bool): Whether to convert torch.float32 similarly to torch.half type. 
- - Returns: - image (np.ndarray): Data types supported by PIL or OpenCV - - Examples: - >>> example_tensor = torch.randn([1,3, 256, 256], dtype=torch.float) - >>> example_image = tensor_to_image(example_tensor, False, False) - - """ - # Scale the image data from [-1, 1] to [0, 1] - if range_norm: - tensor = tensor.add(1.0).div(2.0) - - # Convert torch.float32 image data type to torch.half image data type - if half: - tensor = tensor.half() - - image = tensor.squeeze(0).permute(1, 2, 0).mul(255).clamp(0, 255).cpu().numpy().astype("uint8") - - return image - - -def center_crop( - images: ndarray | Tensor | list[ndarray] | list[Tensor], - patch_size: int, -) -> [ndarray] or [Tensor] or [list[ndarray]] or [list[Tensor]]: - if not isinstance(images, list): - images = [images] - - # Detect input image data type - input_type = "Tensor" if torch.is_tensor(images[0]) else "Numpy" - - if input_type == "Tensor": - image_height, image_width = images[0].size()[-2:] - else: - image_height, image_width = images[0].shape[0:2] - - # Calculate the start indices of the crop - top = (image_height - patch_size) // 2 - left = (image_width - patch_size) // 2 - - # Crop lr image patch - if input_type == "Tensor": - images = [image[ - :, - :, - top:top + patch_size, - left:left + patch_size] for image in images] - else: - images = [image[ - top:top + patch_size, - left:left + patch_size, - ...] 
for image in images] - - # When image number is 1 - if len(images) == 1: - images = images[0] - - return images - - -def random_crop( - images: ndarray | Tensor | list[ndarray] | list[Tensor], - patch_size: int, -) -> [ndarray] or [Tensor] or [list[ndarray]] or [list[Tensor]]: - if not isinstance(images, list): - images = [images] - - # Detect input image data type - input_type = "Tensor" if torch.is_tensor(images[0]) else "Numpy" - - if input_type == "Tensor": - image_height, image_width = images[0].size()[-2:] - else: - image_height, image_width = images[0].shape[0:2] - - # Just need to find the top and left coordinates of the image - top = random.randint(0, image_height - patch_size) - left = random.randint(0, image_width - patch_size) - - # Crop lr image patch - if input_type == "Tensor": - images = [image[ - :, - :, - top:top + patch_size, - left:left + patch_size] for image in images] - else: - images = [image[ - top:top + patch_size, - left:left + patch_size, - ...] for image in images] - - # When image number is 1 - if len(images) == 1: - images = images[0] - - return images - - -def random_rotate( - images: ndarray | Tensor | list[ndarray] | list[Tensor], - angles: list, - center: tuple = None, - rotate_scale_factor: float = 1.0 -) -> [ndarray] or [Tensor] or [list[ndarray]] or [list[Tensor]]: - # Random select specific angle - angle = random.choice(angles) - - if not isinstance(images, list): - images = [images] - - # Detect input image data type - input_type = "Tensor" if torch.is_tensor(images[0]) else "Numpy" - - if input_type == "Tensor": - image_height, image_width = images[0].size()[-2:] - else: - image_height, image_width = images[0].shape[0:2] - - # Rotate LR image - if center is None: - center = (image_width // 2, image_height // 2) - - matrix = cv2.getRotationMatrix2D(center, angle, rotate_scale_factor) - - if input_type == "Tensor": - images = [F_vision.rotate(image, angle, center=center) for image in images] - else: - images = 
[cv2.warpAffine(image, matrix, (image_width, image_height)) for image in images] - - # When image number is 1 - if len(images) == 1: - images = images[0] - - return images - - -def random_horizontally_flip( - images: ndarray | Tensor | list[ndarray] | list[Tensor], - p: float = 0.5 -) -> [ndarray] or [Tensor] or [list[ndarray]] or [list[Tensor]]: - # Get horizontal flip probability - flip_prob = random.random() - - if not isinstance(images, list): - images = [images] - - # Detect input image data type - input_type = "Tensor" if torch.is_tensor(images[0]) else "Numpy" - - if flip_prob > p: - if input_type == "Tensor": - images = [F_vision.hflip(image) for image in images] - else: - images = [cv2.flip(image, 1) for image in images] - - # When image number is 1 - if len(images) == 1: - images = images[0] - - return images - - -def random_vertically_flip( - images: ndarray | Tensor | list[ndarray] | list[Tensor], - p: float = 0.5 -) -> [ndarray] or [Tensor] or [list[ndarray]] or [list[Tensor]]: - # Get vertical flip probability - flip_prob = random.random() - - if not isinstance(images, list): - images = [images] - - # Detect input image data type - input_type = "Tensor" if torch.is_tensor(images[0]) else "Numpy" - - if flip_prob > p: - if input_type == "Tensor": - images = [F_vision.vflip(image) for image in images] - else: - images = [cv2.flip(image, 0) for image in images] - - # When image number is 1 - if len(images) == 1: - images = images[0] - - return images diff --git a/PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/inference.py b/PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/inference.py deleted file mode 100644 index bcf39858f..000000000 --- a/PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/inference.py +++ /dev/null @@ -1,125 +0,0 @@ -# Copyright 2022 Dakewe Biotech Corporation. All Rights Reserved. 
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Single-image classification inference for the ResNet models in model.py."""
import argparse
import json
import os

import cv2
import torch
from PIL import Image
from torch import nn
from torchvision.transforms import Resize, ConvertImageDtype, Normalize

import imgproc
import model
from utils import load_state_dict

# Every lowercase callable defined in model.py (resnet18, resnet50, ...) is a
# valid value for --model_arch_name.
model_names = sorted(
    name for name in model.__dict__ if name.islower() and not name.startswith("__") and callable(model.__dict__[name]))


def load_class_label(class_label_file: str, num_classes: int) -> list:
    """Load the {index: label} JSON map and return labels ordered by class index.

    Args:
        class_label_file: Path to a JSON file mapping stringified class
            indices to human-readable labels.
        num_classes: Number of classes to read (indices 0..num_classes-1).

    Returns:
        Labels as a list, positioned by class index.
    """
    # `with` closes the file handle; the original `json.load(open(...))` leaked it
    with open(class_label_file) as f:
        class_label = json.load(f)
    return [class_label[str(i)] for i in range(num_classes)]


def choice_device(device_type: str) -> torch.device:
    """Map the CLI device string to a torch.device ("cuda" -> cuda:0, else cpu)."""
    if device_type == "cuda":
        device = torch.device("cuda", 0)
    else:
        device = torch.device("cpu")
    return device


def build_model(model_arch_name: str, model_num_classes: int, device: torch.device) -> nn.Module:
    """Instantiate the requested architecture on `device` in channels-last layout.

    (Return annotation fixed: this builds and returns exactly one module.)
    """
    resnet_model = model.__dict__[model_arch_name](num_classes=model_num_classes)
    resnet_model = resnet_model.to(device=device, memory_format=torch.channels_last)

    return resnet_model


def preprocess_image(
        image_path: str,
        image_size: int,
        device: torch.device,
        mean: list = None,
        std: list = None,
) -> torch.Tensor:
    """Read an image file and turn it into a normalized 1xCxHxW tensor.

    Args:
        image_path: Path to the image on disk.
        image_size: Target side length for the square resize.
        device: Device the tensor is moved to.
        mean: Per-channel normalization mean. Defaults to the CLI value so
            existing three-argument callers keep working.
        std: Per-channel normalization std. Defaults to the CLI value.

    Returns:
        A float tensor of shape ``[1, C, image_size, image_size]`` on `device`.
    """
    # Backward-compatible fallback to the module-level CLI arguments
    mean = args.model_mean_parameters if mean is None else mean
    std = args.model_std_parameters if std is None else std

    image = cv2.imread(image_path)

    # OpenCV loads BGR; the model expects RGB
    image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)

    # OpenCV ndarray -> PIL image so torchvision transforms apply
    image = Image.fromarray(image)

    # Resize to the model input size (e.g. 224)
    image = Resize([image_size, image_size])(image)
    # Convert image data to pytorch format data and add the batch dimension
    tensor = imgproc.image_to_tensor(image, False, False).unsqueeze_(0)
    # Convert a tensor image to the given ``dtype`` and scale the values accordingly
    tensor = ConvertImageDtype(torch.float)(tensor)
    # Normalize a tensor image with mean and standard deviation
    tensor = Normalize(mean, std)(tensor)

    # Transfer tensor channel image format data to the target device
    tensor = tensor.to(device=device, memory_format=torch.channels_last, non_blocking=True)

    return tensor


def main():
    """Classify a single image and print the top-5 labels with probabilities."""
    # Get the label name corresponding to each class index
    class_label_map = load_class_label(args.class_label_file, args.model_num_classes)

    device = choice_device(args.device_type)

    # Initialize the model
    resnet_model = build_model(args.model_arch_name, args.model_num_classes, device)
    print(f"Build `{args.model_arch_name}` model successfully.")

    # Load model weights
    resnet_model, _, _, _, _, _ = load_state_dict(resnet_model, args.model_weights_path)
    print(f"Load `{args.model_arch_name}` model weights `{os.path.abspath(args.model_weights_path)}` successfully.")

    # Start the verification mode of the model.
    resnet_model.eval()

    tensor = preprocess_image(args.image_path, args.image_size, device)

    # Inference
    with torch.no_grad():
        output = resnet_model(tensor)

    # Calculate the five categories with the highest classification probability
    prediction_class_index = torch.topk(output, k=5).indices.squeeze(0).tolist()

    # Print classification results
    for class_index in prediction_class_index:
        prediction_class_label = class_label_map[class_index]
        prediction_class_prob = torch.softmax(output, dim=1)[0, class_index].item()
        print(f"{prediction_class_label:<75} ({prediction_class_prob * 100:.2f}%)")


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("--model_arch_name", type=str, default="resnet18")
    # Fixed: `type=list` parsed a CLI string into single characters; nargs=3
    # with type=float parses three real numbers (defaults are unchanged).
    parser.add_argument("--model_mean_parameters", type=float, nargs=3, default=[0.485, 0.456, 0.406])
    parser.add_argument("--model_std_parameters", type=float, nargs=3, default=[0.229, 0.224, 0.225])
    parser.add_argument("--class_label_file", type=str, default="./data/ImageNet_1K_labels_map.txt")
    parser.add_argument("--model_num_classes", type=int, default=1000)
    parser.add_argument("--model_weights_path", type=str, default="./results/pretrained_models/ResNet18-ImageNet_1K-57bb63e.pth.tar")
    parser.add_argument("--image_path", type=str, default="./figure/n01440764_36.JPEG")
    parser.add_argument("--image_size", type=int, default=224)
    parser.add_argument("--device_type", type=str, default="cpu", choices=["cpu", "cuda"])
    args = parser.parse_args()

    main()
-# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================== -from typing import Any, List, Type, Union, Optional - -import torch -from torch import Tensor -from torch import nn - -__all__ = [ - "ResNet", - "resnet18", -] - - -class _BasicBlock(nn.Module): - expansion: int = 1 - - def __init__( - self, - in_channels: int, - out_channels: int, - stride: int, - downsample: Optional[nn.Module] = None, - groups: int = 1, - base_channels: int = 64, - ) -> None: - super(_BasicBlock, self).__init__() - self.stride = stride - self.downsample = downsample - self.groups = groups - self.base_channels = base_channels - - self.conv1 = nn.Conv2d(in_channels, out_channels, (3, 3), (stride, stride), (1, 1), bias=False) - self.bn1 = nn.BatchNorm2d(out_channels) - self.relu = nn.ReLU(True) - self.conv2 = nn.Conv2d(out_channels, out_channels, (3, 3), (1, 1), (1, 1), bias=False) - self.bn2 = nn.BatchNorm2d(out_channels) - - def forward(self, x: Tensor) -> Tensor: - identity = x - - out = self.conv1(x) - out = self.bn1(out) - out = self.relu(out) - - out = self.conv2(out) - out = self.bn2(out) - - if self.downsample is not None: - identity = self.downsample(x) - - out = torch.add(out, identity) - out = self.relu(out) - - return out - - -class _Bottleneck(nn.Module): - expansion: int = 4 - - def __init__( - self, - in_channels: int, - out_channels: int, - stride: int, - downsample: Optional[nn.Module] = None, - groups: int = 
1, - base_channels: int = 64, - ) -> None: - super(_Bottleneck, self).__init__() - self.stride = stride - self.downsample = downsample - self.groups = groups - self.base_channels = base_channels - - channels = int(out_channels * (base_channels / 64.0)) * groups - - self.conv1 = nn.Conv2d(in_channels, channels, (1, 1), (1, 1), (0, 0), bias=False) - self.bn1 = nn.BatchNorm2d(channels) - self.conv2 = nn.Conv2d(channels, channels, (3, 3), (stride, stride), (1, 1), groups=groups, bias=False) - self.bn2 = nn.BatchNorm2d(channels) - self.conv3 = nn.Conv2d(channels, int(out_channels * self.expansion), (1, 1), (1, 1), (0, 0), bias=False) - self.bn3 = nn.BatchNorm2d(int(out_channels * self.expansion)) - self.relu = nn.ReLU(True) - - def forward(self, x: Tensor) -> Tensor: - identity = x - - out = self.conv1(x) - out = self.bn1(out) - out = self.relu(out) - - out = self.conv2(out) - out = self.bn2(out) - out = self.relu(out) - - out = self.conv3(out) - out = self.bn3(out) - - if self.downsample is not None: - identity = self.downsample(x) - - out = torch.add(out, identity) - out = self.relu(out) - - return out - - -class ResNet(nn.Module): - - def __init__( - self, - arch_cfg: List[int], - block: Type[Union[_BasicBlock, _Bottleneck]], - groups: int = 1, - channels_per_group: int = 64, - num_classes: int = 1000, - ) -> None: - super(ResNet, self).__init__() - self.in_channels = 64 - self.dilation = 1 - self.groups = groups - self.base_channels = channels_per_group - - self.conv1 = nn.Conv2d(3, self.in_channels, (7, 7), (2, 2), (3, 3), bias=False) - self.bn1 = nn.BatchNorm2d(self.in_channels) - self.relu = nn.ReLU(True) - self.maxpool = nn.MaxPool2d((3, 3), (2, 2), (1, 1)) - - self.layer1 = self._make_layer(arch_cfg[0], block, 64, 1) - self.layer2 = self._make_layer(arch_cfg[1], block, 128, 2) - self.layer3 = self._make_layer(arch_cfg[2], block, 256, 2) - self.layer4 = self._make_layer(arch_cfg[3], block, 512, 2) - - self.avgpool = nn.AdaptiveAvgPool2d((1, 1)) - - self.fc = 
nn.Linear(512 * block.expansion, num_classes) - - # Initialize neural network weights - self._initialize_weights() - - def _make_layer( - self, - repeat_times: int, - block: Type[Union[_BasicBlock, _Bottleneck]], - channels: int, - stride: int = 1, - ) -> nn.Sequential: - downsample = None - - if stride != 1 or self.in_channels != channels * block.expansion: - downsample = nn.Sequential( - nn.Conv2d(self.in_channels, channels * block.expansion, (1, 1), (stride, stride), (0, 0), bias=False), - nn.BatchNorm2d(channels * block.expansion), - ) - - layers = [ - block( - self.in_channels, - channels, - stride, - downsample, - self.groups, - self.base_channels - ) - ] - self.in_channels = channels * block.expansion - for _ in range(1, repeat_times): - layers.append( - block( - self.in_channels, - channels, - 1, - None, - self.groups, - self.base_channels, - ) - ) - - return nn.Sequential(*layers) - - def forward(self, x: Tensor) -> Tensor: - out = self._forward_impl(x) - - return out - - # Support torch.script function - def _forward_impl(self, x: Tensor) -> Tensor: - out = self.conv1(x) - out = self.bn1(out) - out = self.relu(out) - out = self.maxpool(out) - - out = self.layer1(out) - out = self.layer2(out) - out = self.layer3(out) - out = self.layer4(out) - - out = self.avgpool(out) - out = torch.flatten(out, 1) - out = self.fc(out) - - return out - - def _initialize_weights(self) -> None: - for module in self.modules(): - if isinstance(module, nn.Conv2d): - nn.init.kaiming_normal_(module.weight, mode="fan_out", nonlinearity="relu") - elif isinstance(module, (nn.BatchNorm2d, nn.GroupNorm)): - nn.init.constant_(module.weight, 1) - nn.init.constant_(module.bias, 0) - - -def resnet18(**kwargs: Any) -> ResNet: - model = ResNet([2, 2, 2, 2], _BasicBlock, **kwargs) - - return model - - -def resnet34(**kwargs: Any) -> ResNet: - model = ResNet([3, 4, 6, 3], _BasicBlock, **kwargs) - - return model - - -def resnet50(**kwargs: Any) -> ResNet: - model = ResNet([3, 4, 6, 3], 
_Bottleneck, **kwargs) - - return model - - -def resnet101(**kwargs: Any) -> ResNet: - model = ResNet([3, 4, 23, 3], _Bottleneck, **kwargs) - - return model - - -def resnet152(**kwargs: Any) -> ResNet: - model = ResNet([3, 8, 36, 3], _Bottleneck, **kwargs) - - return model diff --git a/PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/requirements.txt b/PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/requirements.txt deleted file mode 100644 index c334c0188..000000000 --- a/PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/requirements.txt +++ /dev/null @@ -1,5 +0,0 @@ -Pillow==10.3.0 -torch==2.7.1 -torchvision==0.13.1+cu116 -numpy==1.23.1 -opencv-python==4.8.1.78 \ No newline at end of file diff --git a/PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/scripts/preprocess_imagenet.sh b/PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/scripts/preprocess_imagenet.sh deleted file mode 100644 index 5f04f9212..000000000 --- a/PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/scripts/preprocess_imagenet.sh +++ /dev/null @@ -1,34 +0,0 @@ -# Copyright 2022 Dakewe Biotech Corporation. All Rights Reserved. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# ============================================================================== - -# Process ImageNet_1K train dataset -# shellcheck disable=SC2164 -cd ../data/ImageNet_1K/ILSVRC2012_img_train -tar -xvf ILSVRC2012_img_train.tar -rm ILSVRC2012_img_train.tar -# shellcheck disable=SC2162 -find . -name "*.tar" | while read NAME ; do mkdir -p "${NAME%.tar}"; tar -xvf "${NAME}" -C "${NAME%.tar}"; rm -f "${NAME}"; done -# shellcheck disable=SC2035 -cd ../../scripts - -# Process ImageNet_1K valid dataset -# shellcheck disable=SC2164 -cd ../data/ImageNet_1K/ILSVRC2012_img_val -tar -xvf ILSVRC2012_img_val.tar -bash valprep.sh -# shellcheck disable=SC2035 -rm *.JPEG -# shellcheck disable=SC2035 -rm *.sh -cd ../../scripts \ No newline at end of file diff --git a/PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/scripts/preprocess_mini_imagenet.py b/PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/scripts/preprocess_mini_imagenet.py deleted file mode 100644 index a7e19e445..000000000 --- a/PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/scripts/preprocess_mini_imagenet.py +++ /dev/null @@ -1,72 +0,0 @@ -# Copyright 2022 Dakewe Biotech Corporation. All Rights Reserved. -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# ============================================================================== -import csv -import os - -from PIL import Image - -train_csv_path = "../data/MiniImageNet_1K/original/train.csv" -valid_csv_path = "../data/MiniImageNet_1K/original/valid.csv" -test_csv_path = "../data/MiniImageNet_1K/original/test.csv" - -inputs_images_dir = "../data/MiniImageNet_1K/original/mini_imagenet/images" -output_images_dir = "../data/MiniImageNet_1K/" - -train_label = {} -val_label = {} -test_label = {} -with open(train_csv_path) as csvfile: - csv_reader = csv.reader(csvfile) - birth_header = next(csv_reader) - for row in csv_reader: - train_label[row[0]] = row[1] - -with open(valid_csv_path) as csvfile: - csv_reader = csv.reader(csvfile) - birth_header = next(csv_reader) - for row in csv_reader: - val_label[row[0]] = row[1] - -with open(test_csv_path) as csvfile: - csv_reader = csv.reader(csvfile) - birth_header = next(csv_reader) - for row in csv_reader: - test_label[row[0]] = row[1] - -for png in os.listdir(inputs_images_dir): - path = inputs_images_dir + "/" + png - im = Image.open(path) - if png in train_label.keys(): - tmp = train_label[png] - temp_path = output_images_dir + "/train" + "/" + tmp - if not os.path.exists(temp_path): - os.makedirs(temp_path) - t = temp_path + "/" + png - im.save(t) - - elif png in val_label.keys(): - tmp = val_label[png] - temp_path = output_images_dir + "/valid" + "/" + tmp - if not os.path.exists(temp_path): - os.makedirs(temp_path) - t = temp_path + "/" + png - im.save(t) - - elif png in test_label.keys(): - tmp = test_label[png] - temp_path = output_images_dir + "/test" + "/" + tmp - if not os.path.exists(temp_path): - os.makedirs(temp_path) - t = temp_path + "/" + png - im.save(t) diff --git a/PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/test.py b/PyTorch/build-in/Classification/ResNetV1bV1_5/ResNet-PyTorch/test.py deleted file mode 100644 index 07f81f743..000000000 --- 
# Copyright 2022 Dakewe Biotech Corporation. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Evaluate a trained checkpoint on the test split (top-1/top-5 accuracy)."""
import os
import time

import torch
from torch import nn
from torch.utils.data import DataLoader

import config
import model
from dataset import CUDAPrefetcher, ImageDataset
from utils import load_state_dict, accuracy, Summary, AverageMeter, ProgressMeter

# Every lowercase callable defined in model.py is a valid architecture name
model_names = sorted(
    name for name in model.__dict__ if name.islower() and not name.startswith("__") and callable(model.__dict__[name]))


def build_model() -> nn.Module:
    """Instantiate the configured architecture on config.device (channels-last)."""
    resnet_model = model.__dict__[config.model_arch_name](num_classes=config.model_num_classes)
    resnet_model = resnet_model.to(device=config.device, memory_format=torch.channels_last)

    return resnet_model


def load_dataset() -> CUDAPrefetcher:
    """Build the test DataLoader and wrap it in a CUDA prefetcher."""
    test_dataset = ImageDataset(config.test_image_dir,
                                config.image_size,
                                config.model_mean_parameters,
                                config.model_std_parameters,
                                "Test")
    test_dataloader = DataLoader(test_dataset,
                                 batch_size=config.batch_size,
                                 shuffle=False,
                                 num_workers=config.num_workers,
                                 pin_memory=True,
                                 drop_last=False,
                                 persistent_workers=True)

    # Place all data on the preprocessing data loader
    test_prefetcher = CUDAPrefetcher(test_dataloader, config.device)

    return test_prefetcher


def main() -> None:
    """Load weights, run the test split, and print Acc@1/Acc@5 error rates."""
    # Initialize the model
    resnet_model = build_model()
    print(f"Build `{config.model_arch_name}` model successfully.")

    # Load model weights (only the model slot of the 6-tuple is needed here)
    resnet_model, _, _, _, _, _ = load_state_dict(resnet_model, config.model_weights_path)
    print(f"Load `{config.model_arch_name}` "
          f"model weights `{os.path.abspath(config.model_weights_path)}` successfully.")

    # Start the verification mode of the model.
    resnet_model.eval()

    # Load test dataloader
    test_prefetcher = load_dataset()

    # Calculate how many batches of data are in each Epoch
    batches = len(test_prefetcher)
    batch_time = AverageMeter("Time", ":6.3f", Summary.NONE)
    acc1 = AverageMeter("Acc@1", ":6.2f", Summary.AVERAGE)
    acc5 = AverageMeter("Acc@5", ":6.2f", Summary.AVERAGE)
    # (dropped the pointless f-string prefix of the original)
    progress = ProgressMeter(batches, [batch_time, acc1, acc5], prefix="Test: ")

    # Initialize the number of data batches to print logs on the terminal
    batch_index = 0

    # Initialize the data loader and load the first batch of data
    test_prefetcher.reset()
    batch_data = test_prefetcher.next()

    # Get the initialization test time
    end = time.time()

    with torch.no_grad():
        while batch_data is not None:
            # Transfer in-memory data to CUDA devices to speed up inference
            images = batch_data["image"].to(device=config.device, non_blocking=True)
            target = batch_data["target"].to(device=config.device, non_blocking=True)

            # Get batch size
            batch_size = images.size(0)

            # Inference
            output = resnet_model(images)

            # Measure accuracy; meters are weighted by the (possibly short) batch
            top1, top5 = accuracy(output, target, topk=(1, 5))
            acc1.update(top1[0].item(), batch_size)
            acc5.update(top5[0].item(), batch_size)

            # Calculate the time it takes to fully process a batch of data
            batch_time.update(time.time() - end)
            end = time.time()

            # Periodically print progress to the terminal
            if batch_index % config.test_print_frequency == 0:
                progress.display(batch_index + 1)

            # Preload the next batch of data
            batch_data = test_prefetcher.next()

            # Add 1 to the number of data batches to ensure that the terminal prints data normally
            batch_index += 1

    # Print error rates (100 - accuracy), the usual ImageNet reporting style
    print(f"Acc@1 error: {100 - acc1.avg:.2f}%")
    print(f"Acc@5 error: {100 - acc5.avg:.2f}%")


if __name__ == "__main__":
    main()
# ==============================================================================
"""Training entry point: EMA-tracked ResNet training with AMP and cosine LR."""
import os
import time

import torch
from torch import nn
from torch import optim
from torch.cuda import amp
from torch.optim import lr_scheduler
from torch.optim.swa_utils import AveragedModel
from torch.utils.data import DataLoader
from torch.utils.tensorboard import SummaryWriter

import config
import model
from dataset import CUDAPrefetcher, ImageDataset
from utils import accuracy, load_state_dict, make_directory, save_checkpoint, Summary, AverageMeter, ProgressMeter

# Every lowercase callable defined in model.py is a valid architecture name
model_names = sorted(
    name for name in model.__dict__ if name.islower() and not name.startswith("__") and callable(model.__dict__[name]))


def main():
    """Run the full training loop, validating and checkpointing every epoch."""
    # Initialize the number of training epochs
    start_epoch = 0

    # Initialize training network evaluation indicators
    best_acc1 = 0.0

    train_prefetcher, valid_prefetcher = load_dataset()
    print(f"Load `{config.model_arch_name}` datasets successfully.")

    resnet_model, ema_resnet_model = build_model()
    print(f"Build `{config.model_arch_name}` model successfully.")

    pixel_criterion = define_loss()
    print("Define all loss functions successfully.")

    optimizer = define_optimizer(resnet_model)
    print("Define all optimizer functions successfully.")

    scheduler = define_scheduler(optimizer)
    print("Define all optimizer scheduler functions successfully.")

    print("Check whether to load pretrained model weights...")
    if config.pretrained_model_weights_path:
        resnet_model, ema_resnet_model, start_epoch, best_acc1, optimizer, scheduler = load_state_dict(
            resnet_model,
            config.pretrained_model_weights_path,
            ema_resnet_model,
            start_epoch,
            best_acc1,
            optimizer,
            scheduler)
        print(f"Loaded `{config.pretrained_model_weights_path}` pretrained model weights successfully.")
    else:
        print("Pretrained model weights not found.")

    print("Check whether the pretrained model is restored...")
    if config.resume:
        # NOTE(review): this reloads config.pretrained_model_weights_path in
        # "resume" mode; if config.py defines a separate resume-checkpoint
        # path, that is probably what was intended — confirm against config.py.
        resnet_model, ema_resnet_model, start_epoch, best_acc1, optimizer, scheduler = load_state_dict(
            resnet_model,
            config.pretrained_model_weights_path,
            ema_resnet_model,
            start_epoch,
            best_acc1,
            optimizer,
            scheduler,
            "resume")
        print("Loaded pretrained generator model weights.")
    else:
        print("Resume training model not found. Start training from scratch.")

    # Create experiment result directories
    samples_dir = os.path.join("samples", config.exp_name)
    results_dir = os.path.join("results", config.exp_name)
    make_directory(samples_dir)
    make_directory(results_dir)

    # Create training process log file
    writer = SummaryWriter(os.path.join("samples", "logs", config.exp_name))

    # Initialize the gradient scaler for mixed-precision training
    scaler = amp.GradScaler()

    for epoch in range(start_epoch, config.epochs):
        train(resnet_model, ema_resnet_model, train_prefetcher, pixel_criterion, optimizer, epoch, scaler, writer)
        # Validation always scores the EMA weights
        acc1 = validate(ema_resnet_model, valid_prefetcher, epoch, writer, "Valid")
        print("\n")

        # Update LR
        scheduler.step()

        # Automatically save the model with the highest accuracy
        is_best = acc1 > best_acc1
        is_last = (epoch + 1) == config.epochs
        best_acc1 = max(acc1, best_acc1)
        save_checkpoint({"epoch": epoch + 1,
                         "best_acc1": best_acc1,
                         "state_dict": resnet_model.state_dict(),
                         "ema_state_dict": ema_resnet_model.state_dict(),
                         "optimizer": optimizer.state_dict(),
                         "scheduler": scheduler.state_dict()},
                        f"epoch_{epoch + 1}.pth.tar",
                        samples_dir,
                        results_dir,
                        is_best,
                        is_last)


def load_dataset() -> [CUDAPrefetcher, CUDAPrefetcher]:
    """Build train/valid DataLoaders and wrap each in a CUDA prefetcher."""
    train_dataset = ImageDataset(config.train_image_dir,
                                 config.image_size,
                                 config.model_mean_parameters,
                                 config.model_std_parameters,
                                 "Train")
    valid_dataset = ImageDataset(config.valid_image_dir,
                                 config.image_size,
                                 config.model_mean_parameters,
                                 config.model_std_parameters,
                                 "Valid")

    # Generate all dataloaders; only training shuffles and drops the last batch
    train_dataloader = DataLoader(train_dataset,
                                  batch_size=config.batch_size,
                                  shuffle=True,
                                  num_workers=config.num_workers,
                                  pin_memory=True,
                                  drop_last=True,
                                  persistent_workers=True)
    valid_dataloader = DataLoader(valid_dataset,
                                  batch_size=config.batch_size,
                                  shuffle=False,
                                  num_workers=config.num_workers,
                                  pin_memory=True,
                                  drop_last=False,
                                  persistent_workers=True)

    # Place all data on the preprocessing data loader
    train_prefetcher = CUDAPrefetcher(train_dataloader, config.device)
    valid_prefetcher = CUDAPrefetcher(valid_dataloader, config.device)

    return train_prefetcher, valid_prefetcher


def build_model() -> [nn.Module, nn.Module]:
    """Build the base model and its EMA shadow copy on config.device."""
    resnet_model = model.__dict__[config.model_arch_name](num_classes=config.model_num_classes)
    resnet_model = resnet_model.to(device=config.device, memory_format=torch.channels_last)

    # Exponential moving average of the weights, tracked by AveragedModel
    def ema_avg(averaged_model_parameter, model_parameter, num_averaged):
        return (1 - config.model_ema_decay) * averaged_model_parameter + \
               config.model_ema_decay * model_parameter

    ema_resnet_model = AveragedModel(resnet_model, avg_fn=ema_avg)

    return resnet_model, ema_resnet_model


def define_loss() -> nn.CrossEntropyLoss:
    """Cross-entropy with the configured label smoothing, on config.device."""
    criterion = nn.CrossEntropyLoss(label_smoothing=config.loss_label_smoothing)
    criterion = criterion.to(device=config.device, memory_format=torch.channels_last)

    return criterion


def define_optimizer(model) -> optim.SGD:
    """SGD with momentum and weight decay from config."""
    optimizer = optim.SGD(model.parameters(),
                          lr=config.model_lr,
                          momentum=config.model_momentum,
                          weight_decay=config.model_weight_decay)

    return optimizer


def define_scheduler(optimizer: optim.SGD) -> lr_scheduler.CosineAnnealingWarmRestarts:
    """Cosine annealing with warm restarts, parameterized from config."""
    scheduler = lr_scheduler.CosineAnnealingWarmRestarts(optimizer,
                                                         config.lr_scheduler_T_0,
                                                         config.lr_scheduler_T_mult,
                                                         config.lr_scheduler_eta_min)

    return scheduler


def train(
        model: nn.Module,
        ema_model: nn.Module,
        train_prefetcher: CUDAPrefetcher,
        criterion: nn.CrossEntropyLoss,
        optimizer: optim.SGD,  # fixed: define_optimizer builds SGD, not Adam
        epoch: int,
        scaler: amp.GradScaler,
        writer: SummaryWriter
) -> None:
    """Train one epoch with AMP, updating the EMA model after every step."""
    # Calculate how many batches of data are in each Epoch
    batches = len(train_prefetcher)
    # Progress-bar meters for this epoch
    batch_time = AverageMeter("Time", ":6.3f")
    data_time = AverageMeter("Data", ":6.3f")
    losses = AverageMeter("Loss", ":6.6f")
    acc1 = AverageMeter("Acc@1", ":6.2f")
    acc5 = AverageMeter("Acc@5", ":6.2f")
    progress = ProgressMeter(batches,
                             [batch_time, data_time, losses, acc1, acc5],
                             prefix=f"Epoch: [{epoch + 1}]")

    # Put the network model in training mode
    model.train()

    # Initialize the number of data batches to print logs on the terminal
    batch_index = 0

    # Initialize the data loader and load the first batch of data
    train_prefetcher.reset()
    batch_data = train_prefetcher.next()

    # Get the initialization training time
    end = time.time()

    while batch_data is not None:
        # Calculate the time it takes to load a batch of data
        data_time.update(time.time() - end)

        # Transfer in-memory data to CUDA devices to speed up training
        images = batch_data["image"].to(device=config.device, memory_format=torch.channels_last, non_blocking=True)
        target = batch_data["target"].to(device=config.device, non_blocking=True)

        # Get batch size
        batch_size = images.size(0)

        # Zero gradients (set_to_none frees the memory instead of writing zeros)
        model.zero_grad(set_to_none=True)

        # Mixed precision forward pass
        with amp.autocast():
            output = model(images)
            loss = config.loss_weights * criterion(output, target)

        # Backpropagation through the gradient scaler
        scaler.scale(loss).backward()
        # Update model weights
        scaler.step(optimizer)
        scaler.update()

        # Update the EMA shadow weights after each optimizer step
        ema_model.update_parameters(model)

        # Measure accuracy and record loss
        top1, top5 = accuracy(output, target, topk=(1, 5))
        losses.update(loss.item(), batch_size)
        acc1.update(top1[0].item(), batch_size)
        acc5.update(top5[0].item(), batch_size)

        # Calculate the time it takes to fully train a batch of data
        batch_time.update(time.time() - end)
        end = time.time()

        # Periodically write the training loss and print progress
        if batch_index % config.train_print_frequency == 0:
            writer.add_scalar("Train/Loss", loss.item(), batch_index + epoch * batches + 1)
            progress.display(batch_index + 1)

        # Preload the next batch of data
        batch_data = train_prefetcher.next()

        # Add 1 to the number of data batches to ensure that the terminal prints data normally
        batch_index += 1


def validate(
        ema_model: nn.Module,
        data_prefetcher: CUDAPrefetcher,
        epoch: int,
        writer: SummaryWriter,
        mode: str
) -> float:
    """Evaluate the EMA model on a prefetcher; returns average Acc@1.

    Raises:
        ValueError: if `mode` is neither "Valid" nor "Test".
    """
    # Calculate how many batches of data are in each Epoch
    batches = len(data_prefetcher)
    batch_time = AverageMeter("Time", ":6.3f", Summary.NONE)
    acc1 = AverageMeter("Acc@1", ":6.2f", Summary.AVERAGE)
    acc5 = AverageMeter("Acc@5", ":6.2f", Summary.AVERAGE)
    progress = ProgressMeter(batches, [batch_time, acc1, acc5], prefix=f"{mode}: ")

    # Put the exponential moving average model in evaluation mode
    ema_model.eval()

    # Initialize the number of data batches to print logs on the terminal
    batch_index = 0

    # Initialize the data loader and load the first batch of data
    data_prefetcher.reset()
    batch_data = data_prefetcher.next()

    # Get the initialization test time
    end = time.time()

    with torch.no_grad():
        while batch_data is not None:
            # Transfer in-memory data to CUDA devices to speed up evaluation
            images = batch_data["image"].to(device=config.device, memory_format=torch.channels_last, non_blocking=True)
            target = batch_data["target"].to(device=config.device, non_blocking=True)

            # Get batch size
            batch_size = images.size(0)

            # Inference
            output = ema_model(images)

            # Measure accuracy; meters are weighted by the batch size
            top1, top5 = accuracy(output, target, topk=(1, 5))
            acc1.update(top1[0].item(), batch_size)
            acc5.update(top5[0].item(), batch_size)

            # Calculate the time it takes to fully evaluate a batch of data
            batch_time.update(time.time() - end)
            end = time.time()

            # Periodically print progress to the terminal
            if batch_index % config.valid_print_frequency == 0:
                progress.display(batch_index + 1)

            # Preload the next batch of data
            batch_data = data_prefetcher.next()

            # Add 1 to the number of data batches to ensure that the terminal prints data normally
            batch_index += 1

    # print metrics
    progress.display_summary()

    if mode == "Valid" or mode == "Test":
        writer.add_scalar(f"{mode}/Acc@1", acc1.avg, epoch + 1)
    else:
        raise ValueError("Unsupported mode, please use `Valid` or `Test`.")

    return acc1.avg


if __name__ == "__main__":
    main()
-# ============================================================================== -import os -import shutil -from enum import Enum -from typing import Any, Dict, TypeVar, Optional - -import torch -from torch import nn - -__all__ = [ - "accuracy", "load_state_dict", "make_directory", "ovewrite_named_param", "make_divisible", "save_checkpoint", - "Summary", "AverageMeter", "ProgressMeter" -] - -V = TypeVar("V") - - -def accuracy(output, target, topk=(1,)): - """Computes the accuracy over the k top predictions for the specified values of k""" - with torch.no_grad(): - maxk = max(topk) - batch_size = target.size(0) - - _, pred = output.topk(maxk, 1, True, True) - pred = pred.t() - correct = pred.eq(target.view(1, -1).expand_as(pred)) - - results = [] - for k in topk: - correct_k = correct[:k].reshape(-1).float().sum(0, keepdim=True) - results.append(correct_k.mul_(100.0 / batch_size)) - return results - - -def load_state_dict( - model: nn.Module, - model_weights_path: str, - ema_model: nn.Module = None, - start_epoch: int = None, - best_acc1: float = None, - optimizer: torch.optim.Optimizer = None, - scheduler: torch.optim.lr_scheduler = None, - load_mode: str = None, -) -> [nn.Module, nn.Module, str, int, float, torch.optim.Optimizer, torch.optim.lr_scheduler]: - # Load model weights - checkpoint = torch.load(model_weights_path, map_location=lambda storage, loc: storage) - - if load_mode == "resume": - # Restore the parameters in the training node to this point - start_epoch = checkpoint["epoch"] - best_acc1 = checkpoint["best_acc1"] - # Load model state dict. Extract the fitted model weights - model_state_dict = model.state_dict() - state_dict = {k: v for k, v in checkpoint["state_dict"].items() if k in model_state_dict.keys()} - # Overwrite the model weights to the current model (base model) - model_state_dict.update(state_dict) - model.load_state_dict(model_state_dict) - # Load ema model state dict. 
Extract the fitted model weights - ema_model_state_dict = ema_model.state_dict() - ema_state_dict = {k: v for k, v in checkpoint["ema_state_dict"].items() if k in ema_model_state_dict.keys()} - # Overwrite the model weights to the current model (ema model) - ema_model_state_dict.update(ema_state_dict) - ema_model.load_state_dict(ema_model_state_dict) - # Load the optimizer model - optimizer.load_state_dict(checkpoint["optimizer"]) - # Load the scheduler model - scheduler.load_state_dict(checkpoint["scheduler"]) - else: - # Load model state dict. Extract the fitted model weights - model_state_dict = model.state_dict() - state_dict = {k: v for k, v in checkpoint["state_dict"].items() if - k in model_state_dict.keys() and v.size() == model_state_dict[k].size()} - # Overwrite the model weights to the current model - model_state_dict.update(state_dict) - model.load_state_dict(model_state_dict) - - return model, ema_model, start_epoch, best_acc1, optimizer, scheduler - - -def make_directory(dir_path: str) -> None: - if not os.path.exists(dir_path): - os.makedirs(dir_path) - - -def ovewrite_named_param(kwargs: Dict[str, Any], param: str, new_value: V) -> None: - if param in kwargs: - if kwargs[param] != new_value: - raise ValueError(f"The parameter '{param}' expected value {new_value} but got {kwargs[param]} instead.") - else: - kwargs[param] = new_value - - -def make_divisible(v: float, divisor: int, min_value: Optional[int] = None) -> int: - """Copy from: https://github.com/tensorflow/models/blob/master/research/slim/nets/mobilenet/mobilenet.py - """ - if min_value is None: - min_value = divisor - new_v = max(min_value, int(v + divisor / 2) // divisor * divisor) - # Make sure that round down does not go down by more than 10%. 
- if new_v < 0.9 * v: - new_v += divisor - return new_v - - -def save_checkpoint( - state_dict: dict, - file_name: str, - samples_dir: str, - results_dir: str, - is_best: bool = False, - is_last: bool = False, -) -> None: - checkpoint_path = os.path.join(samples_dir, file_name) - torch.save(state_dict, checkpoint_path) - - if is_best: - shutil.copyfile(checkpoint_path, os.path.join(results_dir, "best.pth.tar")) - if is_last: - shutil.copyfile(checkpoint_path, os.path.join(results_dir, "last.pth.tar")) - - -class Summary(Enum): - NONE = 0 - AVERAGE = 1 - SUM = 2 - COUNT = 3 - - -class AverageMeter(object): - def __init__(self, name, fmt=":f", summary_type=Summary.AVERAGE): - self.name = name - self.fmt = fmt - self.summary_type = summary_type - self.reset() - - def reset(self): - self.val = 0 - self.avg = 0 - self.sum = 0 - self.count = 0 - - def update(self, val, n=1): - self.val = val - self.sum += val * n - self.count += n - self.avg = self.sum / self.count - - def __str__(self): - fmtstr = "{name} {val" + self.fmt + "} ({avg" + self.fmt + "})" - return fmtstr.format(**self.__dict__) - - def summary(self): - if self.summary_type is Summary.NONE: - fmtstr = "" - elif self.summary_type is Summary.AVERAGE: - fmtstr = "{name} {avg:.2f}" - elif self.summary_type is Summary.SUM: - fmtstr = "{name} {sum:.2f}" - elif self.summary_type is Summary.COUNT: - fmtstr = "{name} {count:.2f}" - else: - raise ValueError(f"Invalid summary type {self.summary_type}") - - return fmtstr.format(**self.__dict__) - - -class ProgressMeter(object): - def __init__(self, num_batches, meters, prefix=""): - self.batch_fmtstr = self._get_batch_fmtstr(num_batches) - self.meters = meters - self.prefix = prefix - - def display(self, batch): - entries = [self.prefix + self.batch_fmtstr.format(batch)] - entries += [str(meter) for meter in self.meters] - print("\t".join(entries)) - - def display_summary(self): - entries = [" *"] - entries += [meter.summary() for meter in self.meters] - print(" 
".join(entries)) - - def _get_batch_fmtstr(self, num_batches): - num_digits = len(str(num_batches // 1)) - fmt = "{:" + str(num_digits) + "d}" - return "[" + fmt + "/" + fmt.format(num_batches) + "]" diff --git a/PyTorch/build-in/Classification/ResNetV1bV1_5/coverage.txt b/PyTorch/build-in/Classification/ResNetV1bV1_5/coverage.txt deleted file mode 100644 index ba5c5dd79..000000000 --- a/PyTorch/build-in/Classification/ResNetV1bV1_5/coverage.txt +++ /dev/null @@ -1,3 +0,0 @@ -all api: ['_amp_foreach_non_finite_check_and_unscale_', '_amp_update_scale_', '_copy_from', '_has_compatible_shallow_copy_type', '_local_scalar_dense', '_log_softmax', '_log_softmax_backward_data', '_pin_memory', '_reshape_alias', 'add', 'add_', 'addmm', 'as_strided', 'as_strided_', 'convolution', 'convolution_backward', 'copy_stride', 'div', 'eq', 'fill_', 'fused_sgd', 'is_pinned', 'linear', 'max_pool2d', 'maxpool2d_backward', 'maxpool2d_forward', 'mean', 'mm', 'mul', 'mul_', 'native_batch_norm', 'native_batch_norm_backward', 'nll_loss_backward', 'nll_loss_forward', 'reciprocal', 'relu_', 'set_', 'sum', 'threshold_backward', 'topk_out', 'view', 'zero_'], total: 42 -fallback op: [], total: 0 -coverage rate: 100.00% diff --git a/PyTorch/build-in/Classification/ResNetV1bV1_5/readme b/PyTorch/build-in/Classification/ResNetV1bV1_5/readme new file mode 100644 index 000000000..aefa5f160 --- /dev/null +++ b/PyTorch/build-in/Classification/ResNetV1bV1_5/readme @@ -0,0 +1,65 @@ +```markdown +## 1. 模型链接 +- 原始仓库链接: +https://github.com/huggingface/pytorch-image-models?tab=readme-ov-file#models + +## 2. 快速开始 + +使用本模型执行训练的主要流程如下: + +1. **基础环境安装**:介绍训练前需要完成的基础环境检查和安装。 +2. **获取数据集**:介绍如何获取训练所需的数据集。 +3. **构建环境**:介绍如何构建模型运行所需要的环境。 +4. 
**启动训练**:介绍如何运行训练。 + +### 2.1 基础环境安装 + +请参考主仓库的基础环境安装章节,完成训练前的基础环境检查和安装(如驱动、固件等)。 + +### 2.2 准备数据集 + +#### 2.2.1 获取数据集 + +训练使用 **CIFAR-100** 数据集。该数据集为开源数据集,包含 100 个类别的 60000 张彩色图像。 + +#### 2.2.2 处理数据集 + +请确保数据集已下载并解压。根据训练脚本的默认配置,建议将数据集存放在模型目录的上级 `data` 目录中(即 `../data`),或者根据实际路径修改训练命令中的 `--datapath` 参数。 + +### 2.3 构建环境 + +所使用的环境下需包含 PyTorch 框架虚拟环境。 + +1. 执行以下命令,启动虚拟环境(根据实际环境名称修改): + + ```bash + conda activate torch_env_py310 + +``` + +2. 安装 Python 依赖。确保已安装项目所需的依赖包: +```bash +pip install -r requirements_exact.txt + +``` + + + +### 2.4 启动训练 + +1. 在构建好的环境中,进入模型训练脚本所在目录。 + +2. 运行训练。该模型支持单机单卡训练。 +执行以下命令启动训练(使用 CIFAR-100 数据集,Batch Size 为 128): +```bash +python weloTrainStep.py \ + --name train \ + --arch resnet \ + --print_freq 1 \ + --steps 100 \ + --dataset cifar100 \ + --datapath ../data \ + --batch_size 32 \ + --epochs 100 + +``` diff --git a/PyTorch/build-in/Classification/ResNetV1bV1_5/requirements_exact.txt b/PyTorch/build-in/Classification/ResNetV1bV1_5/requirements_exact.txt new file mode 100644 index 000000000..7394b3319 --- /dev/null +++ b/PyTorch/build-in/Classification/ResNetV1bV1_5/requirements_exact.txt @@ -0,0 +1,89 @@ +addict==2.4.0 +aliyun-python-sdk-core==2.16.0 +aliyun-python-sdk-kms==2.16.5 +anyio==4.11.0 +astunparse==1.6.3 +certifi==2024.12.14 +cffi==2.0.0 +charset-normalizer==3.4.1 +click==8.3.1 +colorama==0.4.6 +contourpy==1.3.2 +crcmod==1.7 +cryptography==46.0.3 +cycler==0.12.1 +einops==0.8.1 +exceptiongroup==1.3.1 +filelock==3.14.0 +fonttools==4.60.1 +fsspec==2024.12.0 +future @ file:///croot/future_1730902796226/work +git-filter-repo==2.47.0 +h11==0.16.0 +hf-xet==1.2.0 +httpcore==1.0.9 +httpx==0.28.1 +huggingface_hub==1.1.5 +idna==3.10 +inplace-abn @ git+https://github.com/mapillary/inplace_abn.git@b50bfe9c7cd7116a3ab091a352b48d6ba5ee701c +Jinja2==3.1.5 +jmespath==0.10.0 +joblib==1.5.2 +kiwisolver==1.4.9 +Markdown==3.10 +markdown-it-py==4.0.0 +MarkupSafe==3.0.2 +matplotlib==3.10.7 +mdurl==0.1.2 +mmdet==3.3.0 +mmengine==0.10.7 
+model-index==0.1.11 +mpmath==1.3.0 +networkx==3.4.2 +numpy==1.23.5 +opencv-python==4.12.0.88 +opendatalab==0.0.10 +openmim==0.3.9 +openxlab==0.1.3 +ordered-set==4.1.0 +oss2==2.17.0 +packaging @ file:///croot/packaging_1734472117206/work +pandas==2.3.3 +pillow==11.1.0 +platformdirs==4.5.1 +pycocotools==2.0.11 +pycparser @ file:///tmp/build/80754af9/pycparser_1636541352034/work +pycryptodome==3.23.0 +Pygments==2.19.2 +pyparsing==3.2.5 +python-dateutil==2.9.0.post0 +pytz==2023.4 +PyYAML @ file:///croot/pyyaml_1728657952215/work +requests==2.28.2 +rich==13.4.2 +safetensors==0.7.0 +scikit-learn==1.7.2 +scipy==1.15.3 +shapely==2.1.2 +shellingham==1.5.4 +six @ file:///tmp/build/80754af9/six_1644875935023/work +sniffio==1.3.1 +sympy==1.13.3 +tabulate==0.9.0 +termcolor==3.2.0 +terminaltables==3.1.10 +threadpoolctl==3.6.0 +timm==1.0.22 +tomli==2.3.0 +torch @ file:///apps/torch-2.4.0a0%2Bgit4451b0e-cp310-cp310-linux_x86_64.whl#sha256=2e472c916044cac5a1a0e0d8b0e12bb943d8522b24ff826c8014dd444dccd378 +torch_sdaa @ file:///apps/torch_sdaa-2.0.0-cp310-cp310-linux_x86_64.whl#sha256=5aa57889b002e1231fbf806642e1353bfa016297bc25178396e89adc2b1f92e7 +torchaudio @ file:///apps/torchaudio-2.0.2%2Bda3eb8d-cp310-cp310-linux_x86_64.whl#sha256=46525c02fb7eaa8dafea860428de3d01e437ba8d6ff2cc228d7c71975ac4054b +torchdata @ file:///apps/torchdata-0.6.1%2Be1feeb2-py3-none-any.whl#sha256=aa2dc1a7732ea68adfad186978049bf68cc1afdbbdd1e17a8024227ab770e433 +torchtext @ file:///apps/torchtext-0.15.2a0%2B4571036-cp310-cp310-linux_x86_64.whl#sha256=7e42c684ba366f97b59ec37488bf95e416cce3892b6589200d2b3ad159ee5788 +torchvision @ file:///apps/torchvision-0.15.1a0%2B42759b1-cp310-cp310-linux_x86_64.whl#sha256=4b904db2d50102415536bc764bbc31c669b90b1b014f90964e9eccaadb2fd9eb +tqdm==4.65.2 +typer-slim==0.20.0 +typing_extensions==4.15.0 +tzdata==2025.2 +urllib3==1.26.20 +yapf==0.43.0 diff --git a/PyTorch/build-in/Classification/ResNetV1bV1_5/resnet_loss.jpg 
b/PyTorch/build-in/Classification/ResNetV1bV1_5/resnet_loss.jpg deleted file mode 100644 index 65b607e3e1adcc83bd05d740eef57d22a9c68a3d..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 35594 zcmeEu1z225wr&$N!9s9Pkl^m_7J|ElV1cG_4+I)WAPFu365QS03Be&~W1(?(cjg(NByJ}ahzy9^FRU1AHUj#gSCa)w9Kte(SSR$SPI20fQ zz(7MoM?=LxM@PSZAL9WQ0X7yUCKl-RBpSUI_P zd3mWBgv127McH|HxqnuIbpQT+EKDpCY-|$lClpV(|Lh0+D*zAUo<2%BG7=5o9v%`h z9umA0Kn(yOp(0xQv%&xSfpiZU1r-e)6>56tj&Cx?T9&?w#`wxhSNl3})pE58qG4t~A3kV7c z%gD;fD<~>G)6&+_)zddHw6L^#Wo-krb#-(1@br4^{q|i@@cR!RLtH#t6l6RU)F)hMkEArv&7ARRxC1Z< zq@yz`+V9iyXdV)pyNo^{qT^knKl<6VUwZcc*0I39sAvDyvH#Pr82}bC5@PU>@c|cP6uIC?Tjp+(Iuaw9YZ#WeOYgBpC za-@#XdlLM}A*Vmw1WO!pnrJ+MZ@JlOHI&1%DdOWf#dC`<$9NOE{2oNeVMXRYFV41n zXrJ9%%3ySuo0s8*Z%-q~}6QKqI!ajD^%;84O+E8~h=|9-Zw4>LfmQZO`Z^oqth{jW<|~ z|2mM^6811B;CxP!GBpsu?O8l&#*c z%FPor2kSd6G!z*0P}!H%KZH4E@O>Z%B}tGI;e zD=BKcr0_=FvxINpW5LS>*tdt^Ene+|5Yyx5qB3y63;WwHfE01)s}V=o!1A8OGkg^X zcDBXxiWHLs56DECb00P1M!_bFJRH!|N(P~PbfWGRDjSn_3e=Ym3H-=4V?O5gDo?H<3}mYheAIC zZuBGHVv0LgNvgQw%MPg#4)dz<8r0x**%K5Hd9Myn_+B+-(f%}{kJ2P7-<+7^=|Eh4 zjkTKrp(08Vp`9hp;>kjdGpzySmg$JLvkElES7{q<*b+8~&-q1@yM!zXN1tDHKF{6w zQlSn;xK`SWb^pAmrGm3P%r{dlfohHs*;;jq-BHxO)TUAaqqe-RCjP0qXDr8;Cg&lp zW2~1rcecK+UBUwIu`ml?Tyd+VG1G;wMIVj7lQcqZ=w=BQXBxTXInwLwF&tm4Bqs6@ z*GqJ6vq=!Vw)AtGIuZ}HK0*@JaP@vKGtL`rZ(V@;ls-l%?R%sXHIpcn_uhFrPQMz_ zCf=nVn|qC*Y7!GB#ZLTu=OUvzw~-LJb1E)Duc%eJF_73hzwLOp1OvZMlv~<@fdth= z#x*)ZXz=w}v9Q*qAsnDR;plLx%+mHqDSwMt`92zL9p)3wY*af%r?2IO(LX({Y%YK*5FGTYv8VIbhWH&V3?6p_iGet+s1jiF)i7u zNy7s&-=%j_0`YKwM51J5n5AX;w&1W}@wBK(#^q;YU?STAG&g4cc{5iwIOlza zT-O)w3?HkvTFJevCBt)sb8_ZW9ADe7ae`h&SYoeRn!P5WY9L9+ecpOh_r7{h*Pea- zHuUltdYvMjCwaHjw}ReoZUtmo!9nA zNn z8SZ;93kv_jbRxF)MXW<#mUl-~cf`wb=Nb84!6#A`%@u(nM!h8~F*NF}n_p7M%?8kr z!&c9_7^a&!te04Pa*uRw6=#|gCL5cKf_^w@2VcEuNy0J82dplRpl5E*?5C8?^f~8# zKkgyFa%?637*~$-pdQ6Z!Qm{WaJ$aBn~Xikf+Jy_uCA__0Dtu<0S9gP&`$5h1KTwJ znmrk#+VP;d@1EzAb@y0QR9Rdf%;kPnngKUd4Aq#^`pwM?jx0wr4DMuL;ui|msGBSpjS6t!r=FN 
zr<=EOo~Su{T_0$@kfXdN>=q#|KKJsKI(G59UB4+Ek=c z>yF70@HfvRtShC2_)(*XtldKAtJte4rt~?5?9B!2nmaYmbCuFG(MPj(thHmp`SNyd zC@kzg*7N>i3Tg=KC2I#m&l3#k7Br3Y?DGVkG zHm{J&(AkZ`0k35k*~O;#HiS!>=jkDDe2O+}tBki2B43)_C#AMOCgEK}S_?2ukm#x- zY`tFpvVu$HrE7|-=#Zcyfd4`@y0RFrS`Nidk>r)R_=`4nA9dtnKYF>+O9|tK>XVWh zWo6J0GdMst!TduViv3U$&$pQDjeOd-hA`5j`OqSn9DVWpnC&T|-tVHhIx&*|^|~_; z%Z2(2R?s6@V#jKXNW`fwJ37W@v%Q-!3;XD^jvh;oJ)9QP4n`M0%gC3bpu)Lysh~+$ zkx|7A2zy0?yQuiO(ot^~B2u>X^Sqkv!!l)Wn}!rzzfsF^#&hfI#8W zcX;d(Y@Ie^)w9;C+9>5cLdXSTXN$ z?AH^u-!kN+6liaR1A=PEo05VLuQ%;2K)$Z{xXjXWmfem5 zH?Iph1GQDyt->+FKI0uqQQWR>c+y693vRvPD^XbHzy{7VgUU_5R&s-#D^55TT_hf? ziXs&lC>MIIXniD`@7=}}-+Gv<*DGy`9z`C%QBe{ZtUESyD>NE!$#~p-L9V9rNJJ5R z^_#zN|38Amkl|ReCE)$8(YLn1if`B8gl(l>KALNa8AncT`BjtJ0}yuJ{*6t=XXx(E zx}AvgL9!n9BJ^W*-y3=$S`G*dD)=G7i)YlGW(6g&@6~pIrhPh~i%}3-_7@{MhYhRL zfQoMF+3X<)%Ax@eeU+Be>|IgUnmviCdPE;#_pz#y>U#$Y^7UQ@F83dFwLVN#N>5S& zI@F3Uj*Hc2Q1qlr_ZO&n+}C<-L2j3q`?92(AyVInK#oek%-{{e1+S`;y7?XhMp`O@D-!& zG7+#Tkav-AlI0D44Hn*j1D22uJ?|bgGQa?G2x5(}QViU;ZJRoy5{LIVU{C#pv47UP z`O7I9;DAfXHPpz#k8M#0&n0ym)X;=%#iH2!#VarsKT*rQTmoQ3{4Vq(%pi z6lH=JQA}@DsQ=aSe_iu`&6fYk7V6iK2dbNTavJ%lCs*2h`luyD?ZonSTN#Vup-}Il zv+k`#){K{{RP~rWy>-Z(;t8EyRR#T*k7uk(x3-bLMJkCug{Bd7sTPUmwiI&6&A`=^ z#BYEq^0&x@H1Z3k%AdlHVeZ3lz@GwD)}KNX!0ne>M}IkiD;)4!r26fcNm8Qytk>-i zunht6A%AUSK?ls%flSQYsXW|c5A1b_h9Py3{#PX-T{r?zqEqVI zdw*bENxx!U*06`e$l#H>bU0v<;1}8z>_rX>YC%U3#>`&`WA1-@!*LM}zhy-JLqn1= z9zS%!^8*~^BSri;!@+>^1%Twn;h>5s*y%Dy*5WD#fp&J&-T=s^W&pn!Isx_}> zW-vu;_V!*zV9=6lGDb1yp8}ki4H=P>kJ@hi?b~))^1xe`e4|rBE8r^t6g;h`3I~k! 
zg0j-@g)sgHg*j-l0%(staB!khD>@#!ZZ#;oLBi5EoiDBB5z^onVy8(rN>@>C=|rmU ziI$?9awc2zGBbn`@Q+*yj{%P6DApJ~S2`gG1YJ3oxL#lQu8soL8}ZY&Hp*HGcuJag}!X@|%WDoIcjT&F{HRGOh>X5cgCpe(r4ZNc3 zw#U#(GC zzO*37Krk+TT2|q!2x#dcO@6i2RyEhCv89G!W+(^-N0*D z0p8U5I2+_y#RQvsZ^m)VynL%o*S8GMSSL*Kf=%rLEPxO#fa=H3&dE=lQpb6vby8|R z@mS<`E|{o%^(HoJwpNI2j=2n^gCTA=MScI-KPo%&*bdZ*i#|E*H} znEU3gI|~RG^RD$r@x0Zm%IFEHc+qHQ%wM&!wJL|^k~+U5{dVzx%+d%b{u z7bc3e1{U-_V$w-^KJH9H{(kQ3)#bLplbeZ!%ct8c!}os-prufy5m{;n(meOV>2R?` zuBfPY5~T6x+q8*je(otfbWw6{@~S=OAxHxW8sRE4x+q>(H0={ko zAjGS!)AIsTo9PjMXrqs}xn4_Ch??cTMY8#Z34^(v+&^E#^D!+asE(gFvhyv(J|@3|-XaM)iu zIXQ+KN#t4JYZ`x%nf8W}8enNoJO|Ns{83Z9zC~RPvwvD6rnD-0!_$o6*jeKGe4qNU=3)gf$c&S(WJQNGZ^CDKW&b0T?+NI-eT=_j*XhZ?%e| zsF;L3ik9QYb7Pc6Fq3TT%|bYhtD~P_q+?a_)RCU;GWXmvkiF6Mem7OCcdkctkCrX@ zs4Q8wq?Wr$zX;9o)5F<^t|5S}%~B6A5nFC*fi#=*qw!A_1OGV6@eha{{ zz6Dy%>CRMJHGZFkyXDOX&eZBg9`g;7hS101oIAIh?u++-p14 zoE&Vs^-+SIzyUn^^GI+&jOS9>4Js78nUjta@*l3B|IylN7el8}9V49inDu)P0BJnd zV8-zyFWW1Rx5|^%J6qA7)gE<9EX*6EPn1^ZTn{A4+;rF!^dP(+##Gcasw)*lG)k@Y zJE$e~!zSuiBWz;yQ(g!l#f`6WieZRtBB%bv>wZJpLtC(2JsDcsV^E*sNyy0>!wc}R zT7ChaiUTO=e6-rU@`bUhIGasP_Mnd58{e8bveo4>>9{a<5h>Vbo^Wy6_y(i* zQjX5CWhLVidyF?fY+wkNWeCQJE!aMOSVa?=g(IZP8i#VCs6%Rq;MkVrW>- zPV63AqE#v}i?$$04 z3}ZEdT;D2ktwEX+6OcIwvVWEBob1A-B3 zn2b=61z7H*)U?we=CbfZct5Ue+tXVRl2<*xrnIKq@IoM1GL)31#>ndl!Q$SA=#<>= zwZi}%Q{bJbEE}<6pgqqGJDZg|UEI8yQ0$(#kG!4(5=t_^suLQT3wi0KNT;W;!Us1M zc3)0?TZocNm2q>R{+B}&{-DQ!Uze()aAiUcM6V<^W=sdc-I3yISLX>Qrbp>Gm|d+X z9JH;q`XxEzm{rr9(s=`tIjTx?$$|F|!t`Y9mpO1Z`E_S>bCxM-=Fi74 zmJ@|IEaY_`h;X4VL>r~PNjur&SZ`a=sKDw{4S^Ll>W5Iy|rP;V&uJ_L$+lsEZ%@(erZX*yuIm?qEy*J!N9Y zrLSLiOw_-81sL`MHjzw^a(;D%0n2P}o)orcEf@}1WX-m_I#Wb%=@Dxw**k>Pi_lrJ zlGKvF16V{FZoZ+9c8TrbfXw*XW%-C7Z~aO3&5^cZRLuYMOMhYJzcA#& zV#_5bp5Tc!_8~KdST8E-9+T$%jj{ScD&QNoWo&_t4Z}zcNRmp7v{eLM%-|CY!qCUa zW+;)Lt!WF^o4%Mwc%?E~7|^;iZ>~HESPT>i$+BL$BUTIaVyzCvv~eIP9__IEP>I}* z1!$*8tw!p-49aOfI(vVvGns}xJ!T0OzZ2c0`o z4>Ir5-YmT(jE?)F2(YJn)QtNnWTtuKG+cg$9JI&Wy^R6DlM`zc?uigrmq(WxE0rT~ 
zl8u%PrLehI`1q^3FhE2dop?p)41(RP37qlXN}IX%V<$TMK7C`Xf4A@n-59s+VEEbI z&Tss+5hOVR^*=3ylwF&xcI>F->GDO;2{)wW}TyV8wVpSmRSM-_RRR zk{emoWk#QI(2l7GS+zoYO zfm!+`C7Jdb16*qxw^subxG~rzC7dKgBFh~&LiTy0 zqjiXdq*QGOVW8{AV{==s*movlw`phA>w$kf=|9;kTHAH)HQB~3G8{m6&|!Lxi7>iR zWfR2PBFBtU@SnIMe}%H}ztsq&A+H6PB>%!WA*AiJl5F!onQw$L>JxjX7adcg zsHY8>?__2fE|L~%=m(oqxu!NugkQ_2kp2+c+B%?+c(DgFsxL3Efv{#(RRx&`as_GJ z4i{YH?WVT=;Gn;5g(>H0`j+hN8l8YM_tvjZNppemiE{5eJTx$t)CMD6)Liu#GYe|x zrel;0Pzf{?&3+m>ir!V=uyBk%OC;`NkohOsR}mu%>I;b`0`J{ zz7ZkF=`P3^BQ(ALHs_ax@T^1ZD5B2-q>tlJnk^U(*hbn12~C8trXE-*YJJ+D9B?rj zbP(i-ADWjCf9c9kj`_fo6@i68Ww-P*B_ojLUf$q0CNE+}nQJZ|3Q) zq-D}9=H`?~wakZfp?C`mq+%bjMc<^ocG4`sqS&D!=pge-Brr35ellGkTyT* zrCe4{!U3k2Z-E(f{P+fPIW-#z`Ls&NO6%|Fy0aZ0+aohz)+ghKY&DbmQe`ZOFlAPS z3~GJ%Z@=G9t3WFcRl9J>jH=5OjsEDYU@Z6ZH zb$y#}NTLRXQa2s@?*v@jU2U1P{9z7Jq1oiGCyIE{rRqW{Y)v9~{LxeZ=DB6go+2-3 z*-E1wW9cARq|Q^Mn__UlolYbczSpGvypf@og8*wqKt-96P~R!Sev?n#h%R0T)GIfQ z`}p4DpF^!!D*do|02@X~@Mlw&A1X348MQTn)HwmCg?TDVQ_TnBcP$cSoguN;9ixVn zO$c|-)IVB|0*_9ECL2L)^1lK+?xo2Q(*H+TM-dspt8`F2lm(*6o{auNOuJ66Xcp;* zyJ13=;Fp*i2lA!jAAZy-cAp~h>yh>wHO#&o*3pdDZ0kxol6``Lu$nK6TR17s`Ccx@ zZ-Gw|p%Pul$&tOOel@(sSBvq5V>hSZ7)qnO`*m5(etUZBLzN+ng%5vs8vKQ#{NJOl zso#TGQ2Ot|@n24_X&UN6_9sEWDTGWTzZ1iq{W2^Wi22tPA%q_VOG zVwX`_8SuC^NLqmm3-w;Hh9t#CI2^EwjBpstOhn)SP4a4McKrb2;_N~zq7B6E$ z`EL-3*p7t^aQiJ_{@#0TM@}h*xJVu#H1$glVW+L9W=?MO@~QEF-J7J5w+FIDz?JD^ z@tQeZu1J*-C0?|kNs;`y!=5$Y1ux_s_1vw=FzAK5iucSyfy09wQYj7}w>u6*#A!;w zz`_9UEiK^Z`rRVj5Q%22f?YEI-UC|dR`4dh=Q2|?R?xC74KuLVo%*w>d(p~!^pGNC#9kC<663- zB^(fv)W@ATgR_3u7B?{3tJZ@`_e7V12e3!!^Vw3#VfDtNz)(-pm1)% zQXkI3Q!QtkyvqzfC-L`JiSv_=xXv?QlE}ljkA@Usjvo3u(tU) zbifQb+{Du(Px=Zv>I-9Y7rLysM*UB!u*~NeU zQ9OvM4lT~bZhjj$zh)No7~Nb}38krWg*F{MgT;j7T@A*W8pf7@^IXpSjpD`k%KHL> z>f~&(vGosqHu!9PY36qneTA2@<6g`{tB2n*p?bvAMYb#m?q~rgIkyt+OWu(;7um$q zv4urt0g^qJ{e`GWw-X|)ulN=9$leq|TjQiVtul5}Jf;6hM0<;tY#haE5#^9Ma?Xd(IPVRHgc< z;=)P`yK)-6VWtJuc3ZsjgTO0TFiZO3MMS0-OqtT^86rFT!7Pr{`K;gt-IR&`3w8*# z*5TEZnbwER}wz1TeKzSaXD+ 
zPnFYE+0^w3$EY0P$MV0$Ed6`b@prL=KSn8qeoJ#)Q-4GK|LjGCg+h&2oCV|={hgSo zeMQp?D=!&0kFUbuI%gTUKIu_sAn?5W3oaVxoZ7Q*EuXZLYElOCK@0?C?iSJw5;)+_ zq|U@r{{>d|1^X)MU4eOxQ;EsO{`FgvJT>}ivhLKjBQA*1U>NR|O~?s?dcDuP}T@3*tmPA<%IuOO=5CwM1D5MvnpdkFrI z_8B59F;O=%C9z0-#o$MVh=rQe!9ol-sqc(@v0Y(*v)JArj2g$a8xA=BDWjK{roaI? zQBw-W|DF{8`P+GO3V+Du11q6}Pr`9X>R}WV8>dCpVHx8^Sb>~!&zJLviL00&ImN`x zVFl`M1ki^_(Qx)UFU)5bx8XvK4YtfDmmYl1c9&KZNf1ImD|wd4?V6iI|7ujnJWGJ2 zDJvQxbvPADH|7nTN!{8LqmA-Cz4p8y(vSCJYi>H?_4qb4yG2YKt8X;Rn%zIvMJ`fdZ1GBCs???REN2CC0&>gY z?o3B?(%wp!;$X4x@ME@4^cSBX>F<7V1DlO$DP>!a42-rcB;)tj2c8FGzAPbAz$Z$f zps;@>Z;TYioqQrm^-)fD+#=PUd!OYMi_>9aXM1f_mnx2rtFhiY-4mIK-O~~m^0V>M z;*Sx_Dz8A();8x?X{_(@+8!Tr1xoTFEoc}Ihn5u6RQsxFAaarPm!&A2_2-)9R_XR0 z6a?-o0g$615j8nW?FEt8z%0YZ+u&DfHF%L{pGk)Vhf4(q0qVaAPUEeJ`3))BpVqur zCq~wTbe}^u9v*q(A@Z5DtYKoqsx{ExiG%#cLHL#K`^R_*ngH}^rJ zHVCOwHWhKdsk<3`FBeTpDZ_&G&>PRV?k#08PS(VGVcVwx~6sIavf|vsAQSBfbzT+{l<&!T-m_;ve}H0g%5l_1$oP z&rU|r<$h-j{x}x@w^uak&@+(~>TgqZlR-q<)2)^tOw4uNb)(9hLX`1fgRYG)a@fj3 z+(OC0gk8%dWkja@JQS!Uaq8u9&zboPUKYGf-Y3~IQLQNfTVg1j>*n}9olC2bTHoy! 
zzn~=@rj{|zY2JFh9FFAHhG}c4&1EvcH(RM+j8}P=YDFHBs=99c%3q}EvmXS+OJx`R z`a|Ek?6g~NxAr-0&CB*BjF@%hrV>+~mKOdoBN5uNyN~?1Y`N|n)<|-$$5fb-9Sb!JN+e8oN z6fP`{S4J+V1B&4QLg0Jd!->60^d;0yB7OVx_Z43Po*0OfgoDYsY))WHU*g!p|CCAHnkoRPu zm*U!uPB~&zuH!iFOD74k?H%t5jilOcg6uU^Uxa*8Jb3RPTtoS1)=hq8lBkqw&qKB9 zFm^5Ca5i&Ho5Q!64i}&`J6kXi`R6Di98P`jNYig6lYUvbn0+Os7Ev^ctN+79;0E_X=^XuO zW8?P;*1cxA^w}BKk92w%I&@l5ByJJ`)Hynb5qVE@(q-AXIoR}}#!TCkf*aeMV6t6% z)tH#AD>uOyr^4myos(?`go_dV5AS09v%40**0cH`tPGGE^7?3nX?k(HH+S7)dQ@>S z63cQc%i?>fHB`RwZg2qJRjiTWUiB22cJSN}>kl6-X1wDzsLtY+1*we=0~d^#l$)ID zqMZ91j}_LUue8l5fa7k$d{=Fw)5V5y&urVY;Q;#Uq7*SgHKlRdiRvVy%huha_dOZ= z63Ee)I{l-$DPQjAmIj6}W^*D~*?(Ey)L&Wm-+?Xugmm3;!zH(Y+UnsMS$6ZUCGFsy`(;KaIwegrw1AFBtupg5~6?m{OFdr&Xdae7D z7Ma-2Bp|kJl)KHTRQE2H6UPiwFwr)NRMIT9BguC&gry1*W-L*@STR0qNx7TJoa4XD zJ?}CMy9iwa6RYm}sd@#!P7|t5wXZ(5J^SJl`K09H@EgK%0!+`4FTf zJ-Rr;t8BtPH?Oz{p|X6x?m-uCv9H8{NX)Y6_P$Gwf^LCu(%I3<^1}D>hMp|%)55BA zhi`pSQ$Tg4|K7p+>*DQJ?O@KSQn#Cwrsw8HE@<2>7~)qY&vNn=u$1r8?Gs<`X@r@n z4fd{A7Ux(oHW|!ayTh6i$DEr&^;*-!)21wtzqK7PO`MtH)ZNEQ1J+QV_FV^@iu&XDYsbzyghFAr#arUBEeMqaJny27^ZR6MwPW4bHlw>Q6XS0)FFNtG7V@D}!1-+|kO zfO+W5nsa`plW+4gLwd)5ZKwZe$NvMg=3n!tpT`!;_PLu!stmLCj#dvwx%b1BDDIGo z7{&8DD`%$Qlp_mApT{MUOFzxFiHf4kl3Zw?qgqb3%H&_SzZk@RS;>Lw}nMRA=d zCxw;CTHD}?^9jg|ifEb`snJWo0pPeM$g7A64q$I2SC4Iae*!G$=|*?F zsOTor6f)9Zf(}vvUSEHqqM8~id|KV|!N|5Y&%O&GWsD?Ds9o)p}SMIz~4A9Z|0ov03AH z1VF$h0%HGcQT7)UPLD#OTf{l*FuIJ@9)IJQRnxpIcJGQ^7o)l6>7so474$=|rdmI` zI?vFOKhAo{>aAn(3!v!_ChXl>-6_4NETN$`C@1;TU9!$m&`JeC-N&vG%V6Z~Devwe zwywF}hWnnZ>O-R+R%7@C(PXKeuh*P#RneR!aekwr;IF9R*~GLvX7?29i)Zusim7DI zKKX>_iAVd&^()G0>CB%mGyYB%?>!_E(U9*Dd-E~UJ@N6Jr*WzC`0X0w0oyF9m1l$E zMo>hCap-`@BT3&Ljmt^GxPNv zloK`9hBbWym^Zn#phZ-Y(PQf|=B1!*@9>Srt)Z9rd^ECQi#c)w<~-Rhs;7H2j1o$x z$!O^=ghF+?@rxoudyiDHmv*dyjUo-pD?$ZLUgF2amn1b-7d*u6(qLJ)Dvq~`S7lw` zueV_mK^*&S?QlT*?NBSi^~C=659WIQom|a`P#SLGU|o{Or!)=thtLjZulLU;?5azI zA-h=n<{_gBpEtdKZ+2wl>SZO z4bphxcxJ;qC8&R<&2dNhOh9KoxlhCsR#PX#uTf~QHHsnH1dWqu;igRL5fj0gh%T0$ 
zryJdf+=WbU6UnNf#Dt+lS%wKXImx;6PW@pjVsS@DDdYe#hLXoK*c!#ai`UUY(%BT& zl-G#$8!&LVUfH-GPWZzlHZlg0cG~cGTO$9Quf%vLV`(5%L13OL&VD}uUHo$;q9EL)QU zXEj6LLW&d0%iXfsNf^0XGm zhqAQKQiIP3b*D5dTIzUa=Gu8>in9F!&Q1$GQyE8!r3A=I@@Wf;1E9yoi8=QLEZ=Pq zl>R7@b7?zFqVF3@$lOjCy@-1<^Ueo<^3&z|3{uh_wSJYxMXHfjwa+S9o0(Qc+9cD3 za3XVRTH&aBXJZE6@ZRf)tfQj1Boz@JZODIrVEuoRAOCy)0SkU76_tt^ZGGLR znQad^fUjokOl#9u`&xI7P$M?9@u`NUu63FT$S}Bka)xEgk@~}vPo#9{cFv65R@j@q z64w(D8)31CXv<0T{+Mm4^*IOoy(bI>!*sfnXs!XNUyAP(H5+Q22=pa7vK-tU_4iMW->vx1i zjunBwq2?;V0n~blDN}bIO$=A&jb5{TkA!rkzfzWr`;fW27oM?Y3C=z6oMd1@>|Kl7 zOAwa$aIojUMTl^ieE{AX=Vhe8g`HwnKE<9Rqh50tu@2`eJsmj-yM@kJ;tJ&@d$39B zdsb0bD+lTANwVP2bC_yKOqZNPVCGF$K5{YDX=4N99#Pp97mCGeY8hP&ER2^J2(Qsv z{jjRtVaSieGTI@D;B6X6xFZ>dCU7U%X-imFpvH94b^osd={C^9pI8T0S$Qv6SJ z75;-HD?G81<`~vUl;_(nBcbl4hYEPum&iHiB6F0r-LYwgwPvitKI-j1dk-obnPgAS7-aTd$bt?Z+=PEx5s}Z0 z->y5p2_4r32E#g3@MhYe*19`Rm-N0oJIZFhzRn^Fp1FMam{BkrCsl7}6}Y0%+J_Z+7XGCCp0{Du6ky>RaIw+WSg}iwNEQ3in!I65=NNDx zNmi>vM}^&Fz?10EM0#XahGJA;UWtHHBB0d46txG)qLyw;EUewRnn`(BaUo;GWt+L8 z7Qauf96t?}RiT=4NI_q@*7opjo+4I8nX#H{coXNTt{0H1W;-TW-<3n$HV2b;{U(&V zT8qGy@4aMYA=PaO>Z6(C=B8JUW=}~XFL3GZm_cC5O&6s#6J@@4cgxK|S+v{Uel>MO zSyrM4D4rU;`CSGA*k2=>wX2{~?$im+D6R|_mXaOEe9g9FskJw0w?gj5#FM9noE4=V z#(ROyz1|HIt4A+;UO%a!PGV){m=ntH--5dv8HnB^qkNh7)6X+ z_`wTw3;(APviMZ-frTz?( zw?w1>QPAb&aC5f1Z`0sgapwsblqN$nC+^4m)+j}Y1-d_4doWk&j*L%q( zNNL$hB}Dd1u!`zZMN|_ZK?Rc6Rf4|)3K?MZC^7_be?bai_hPmO^R|z;Wlyqt^9#9q z%-`(4RwsN-cLvan46fmysCIAkfhzaR_l_BUJIsH)SfD^3_ufgx%GQvo&TodO99#vi zb8DctAle{0W)0E!#E;3;4N0bwB55zd#oD|u)Fyj3bu2xV7_>6~yxe*qEKcV^_F^(K zjv}d$AOVtT_pU^FNw7S)#%-Sm4p57lQZiNx7Zu5rLv1agihP+EJ#l(20CEV_RGF1b zxg35RJ=tXYm-(!JsjrLpU&iaqt*I*kqUfr$6^)uUu0i_!FZ8`1q`=O$6{WMX-gk&C zfPEvm2e~(=x8XY8Av1d?x8FBQIP8jF={wBqFD7?8s3Ai`tb&U~FC;9R%M)i-P&N+a zzk2i8YAF8@tm$K+hk!37KHL??yg6#}Mevk>OHzE>M8eq;?C+BiM-`e38tn%cLC<;F z2)|g8=>#}r1T}(1bqb2@*VL?q+G~UZ zxCG5sCwE$hZ$k^nWCLjB6f;lhmKD#BaH*9$WJdyL=_nr=kP`Db2>M+d>aW~HQ?OgiFvta8IsXGJr 
zJgQ;MLthL`WLVHNxiN%LG^bF_K+7CB%`y7=ir3~*Mr!fSw2#G}KNk~|_)?-wO(vJW z&0^(fBRaQaVws|mkm{$^*OMb#C`HdWm$!Owq6=9!1`gJd8t|=`9D9y$+Y}E#2Q@Kc zY8wq_y8_KnBldy=`X)atAi{+v~zRqIjnGsOAMgCZcH@_R8nnmENHBHkM5r z4TDMHqWM{pB-NI>Z?Ua5=VcH+lUqsi=y(+dt*>+r`rNo}hHCgqa!M`YR;+xwSN3JS zKQgNhYLeLaR*dRwZrh_g9-&VW@|9*;`yi`13X48-8XQ5soG2WhZ6Judaep$JpEb0+ zH(MhI2;|0*Bh2{x|C~ywF(}Hv)e6+4gPoKkpB0|Zq1F=y$!+X)4kE)^EzbzAczWI@6P^%_>K#@I}dJ zd~|8M>$tw@nwYUrrD&S%t*LK1xs@O{BSZ2^dA?oE4z~QN6o|Izq3Ux>-0zU{)6%T( zrRU=>zLg|5n8aU`x#^0wKGy!StRkQp7abB4Wl7<)Ui@O47$E}IL2?_o*%dX3tyOVs z{i;-QA<(|a?K6C(lH1V?IcuoN!Z1)Y1~@Op}YL~ub1~LeKKABlrx-#`*3pT*_$>Y{;zSuO+Z_d`Bq=w0kCPL&i zaFu%gSp^!F&?c_xxR15RC#uxMYg?*5o7wo*M(>|Pd`_fSJGAFsAU0z47430=S{cEB z7>%+~T&jxd`J%skAKex1l5~N9qbry%UD!$Q;{4b<(M?0n&itpw{y!oc?{{RfznUt4 zpLipYv!fJxBe?cbFbcc0O`c*hg{#As+t{=_e@SqtBNZ1ArNB~0dyCr@& zE?@geSMx5A*v!O!kQ@F{+Q`>3ay(0|>cy}L^3LBD=1(e8sD$;Cc5`BueHVY9f6Ma1 zYhcBc*J^hSH(=`rVsqE}i`=GS8y`MwJ2B zLQ8KGq#6=p*a=XA@brL=U~-hzC|m3akh zIFf@|iQ{R)wq4bIS66=Mi3%*ZWxdetfc|N9L&_ZYpgqyraGs%?@TWSWN3&gdv&A2r z%D*>}Uc24G0nZRAp6}8O)XrKTwW8mB)Y(M5ZQYhg1Y_Yf7>Kp#y1;&Ef@;XEsUd*D`KB)5{{5be z9YX@mbG-zf*gNz+dnz5m>o5G1jpG7Lh|G*xN!;Pj9u-v6IgNFbYYpJ4d8ibcpW2** zozF(dL}uRh(b`(m&B8{*i+SVONmG~GTAq#ahK9;uqVZP|+Be7F?j_ucXjtS9Sgj-O z-283ZcPHos>d~>yA{|m@hU4JSTAXGt{^CXIhwczczj;e+ccATpe)=_Ohaw}C}4e~g~C94TuLK=+vqP7F(mg_eCb1?i>43+ArG*1Kn`+1X`cVkq@Z%?-7I_LTOyN<0a19{|4SE2`C@I{YW0 z1EJ0=r8G9`OE|cqXM1`RIuB_Wb&vr_{!F%UPuB*SBt}#VmE*Zg3!>4|is?HY`RmrZ zi!l|>bxb-Sv^nA@G9lMn>#6%E78VyQ><^tI;tJ$AinP|&JVAqXR~2{Wc>>&(eo7pD z1l@5~=ZwYks4*B38kSe(ug|U%Q^{@ErtBW}JUo@dye%|jkyFK+pcT+`8LfyUaIx@a zv52m>Cg@|cU-vzRb{>Q|SfY%N>j>sG(kqpG9uEi=)c?f-Ns z?f2xhzx1=<<*}JPk6CCJ_qAfc`yB%Tah$`wvFNAKwhaf5UR%QflwA%QI;8y~BKVB* zrjHu>B5`*TxrTy@u10X;y@O38PjQ5(pMlR8LfF772OXH)7d}Qg5|>-+jfu>6B`~_Q zvsLR|`@`C{GM;!-DqwlJ$uo#L#3=7tr3&d1FERQSy!7FEI|MuzEOQ&*D6ihH! z?Q@h2@pXNc+J*K5p#m@q!p~krXVW=Pn1cgQ*4viE#t@lbks>C~E+D(-#jaP&)zzkW z20O28vXn~pmjvS{mpl|xFHcLnzfIi=HzxJ?S_w>_f&9!GUcdpnb%lsK1err}*w`G! 
zIw$mZe69?;_XzvCmVIB%(?`Go9mEI1Z~j+%*BRAhx~(%*ML>!)sZJ~QPkQNXKozOzK-`pQ}T|INp%sThX zth3Jb?_O)~^?mPpzrEjYzt8h*6#2cPg5U@~FQG&Ip=V5`hS8x+`bv5yQ&I4-Ub$P4 zSZV}$YyVx~g7=n%b*0qtEKc1Z_jMJhjd{c_5xHo84zpDgVH_lY7#B6kR2bg}X_s7z zOs~*GJTN4spRzieN{}7elu3zylj;)krEwfvwvAU6zI^%1FA8D7Z?r=2M4)|{Y9n(= zuXWIah$dTNy1?3Q!R9dFxQ&XPBT-!M-#=S9LUP2qAA!CgW%v(@5GVJD)o$CykQK04 z4jK84n<8j&MX(J zZlKf19AfUY(4Ll~QWCjz-S);J5(T4jp1TFCc0a4D{C>qn;#tbYJck$|p>gh@C#3X) zx3C(j&LEjf{Y+3`uw4@^GP`VMJ@v8UL0toYx(oow2kMEtMXgICHWd`{>(`g_M^i}7 zM4$hqdfR_>L$1#yY~LB@rff);ToZl zdIY-jp6VUiVpX8MU&`e2_;8Io;TB{v5qQ`m5QC4Bl?)&P$rF=#XcZ};y-ENk1sG~q zQUJ2LUn?7wdblAx2Q5$a*(u#Td0(i1C3N;z4|4zMrU~p8@(48fDq*95bTZC%gVVq6 zN842)c+5wK`wyMs2EM=wf%8H>=CA{RC0puc0YJ#V=t4NV_Sy7S=xNvjH(Ag6E=Lhz z@1@0y3(T0PjvgJyuFhuF*@!UKwAid-mvZ>N*FHOUdZ1u3Wm;Nm6yOH4T;<;QJ0*ssk}FumPfae-S`>kQL`K)hV#etc(o;GEuyj>&$@EY zG~4snJ~t452D<5WQ2(0p{1FUiq34;cHR$!lR$IknN9hDW8!&`o8?Um*>RB@!mke zQ$O_B!&9k0QoC{U?-~Y5Npz90mYW$;D&5I;dg}G(j^P#soU19h8w#zRwD@j;1wTr+S6>m4;srxXt2naK-sS>8`?xp9{y^?V>-( zhUkoTxZr?%IB5vf2RlIBp3X0m;!>v$s2Kl^HDT9yt8{OBM0*Vya0F^E%E=B>>;KwT zpgpC6g;MI4GIzoCfJCPS){H#`i~}(Hj0tF?BL5Yc>EV0f7AFOg7OLY=z+k{D)*^^) z35xi>6=nawoeHv@owMQUAt?g!k~T1?JF1mF{H)1Ob5;xg*i!*X>!0<4|MsB@#n!3O zBXaKLuHxRhelhUs^c3sF+yp5Rj*^H;Mr>Vd7VGoGrJcaCq_|Y&fid3omKeMqAW5u5oF(0 zv)r%{F%ZS&5agA)mj1&&&{R4W!O@0c4h@PFj+f4zMC*JeN^f-*K%o;!`G ze*}8{UsbGt!cJF3pjq4EVK+F1ajyCz>Uqvmj~yuz(J3;#{g2I{_^qCfmDWtqL5W7o z+b&0i@M-7QRW@OCABa6kFZfnyleO0ESgsGD-`Tku)eM%JG}JFuhL@YEbpjK8*p6w% z05ue9u;n0NoRxP-nux>0>rDeZ2J6kF)A?E3GUP>OROzoNq%AG+L)XUDjpsgW5(tYt zgYD%PdY6h+<2@}HhD;a?v=j|y(?gWzm7|-u6vOW^@@Jt{1KA zt934lce-9tgO2H7g6+wi=D2o4I;l|rp{T(eNQ51Mn4w7M%?+2d$iS!=$EPMwa%^I5 z(lYQXvw0{g&xo+cna~Lxudl1ctdy+E3K5M?9HgIlATgN&PK=Tcsv4&WO0p9(!E8OX zJ9!n+(xVoC;=E>j?X2(Zp5>tO+Xj&Bkl%;x78ER6)oM{!*$t=o;-eEB1HY7`WM-Ztmu)U~ELr2L!;3Gtj@1%2sCbQ8>(vrc z-U&fG^8Lva(T7dvB+ZkWQSt0|bg!5c=KP@Zo+ga@5ByHEs&uHR7zAbEb#^vBXkzz< zZU3X4|L39W;iV}p1tQfw4Nxrv3nH(;9*)13yq~fqzGIixcP~J{t}2Sd;EJ4t-L5YG 
zBTW_Nz!9$!{}7gYK^mEr6QlN{sT;B-f|rpVw6c6Q`l#_eFAj8DM2yv`)+8i1KiYUj!> zHmj+E(-vLjt_+lH>7z7jS7hB~^EOekC~Ku>5Snlg5QrWSZv%Ok82&C6K`LQ4iVg!` zTYRw}b%-9`@r^0C?}^EIT>Av(al?Jj8sSfT$JD&863x>_#*7&kxhR`}i_pH&B7!MH zxK|4u(I$Edt7)KaoJeLHM717!7gxHxyzISjPq(55ZZnrM%im?MZ_4{#^KM3jHC{On z>0eu~mOtsjP4rP zb)D|kyt0Bn0*Mojt4h9(%Mfzj5n>(0k6evy2`W)ERs_=*lijnhilDn1LlLx<2A!H= zfdjM;IfH!svW&UrTE~0u1baD>A0J?g%_^i+8VAm=uz34Gp;$L>Em=WGkM=X6nL54k zz8<6V3M(I;i0-E~{NG4xe+N|drwPzLph~aLhH%xtIIgrOOJgh63Ge3cy9yqQ((5qu zA|a;VF@1jMJXHd-9=h*TUS-3M&W%;XO0K53TiRu0#6>L)62jEa)!XHajgH;umBF6K zCR`P757i=PC~Q;(ycF=N7DzBPu367~OmZuh6~)m4pl9V@%zrwQ?yf@5l)J)rP}6U( zdZBMAO|PKP)mKP_+Bp;+oJ0~YtHKQrPlKC08yDt|EqZvW9D%&II}5fZS&Ur^cW+O# zNV*yY={3HXl6c42#n~OlG56$l#{*RfKFtA!xsEs`BZHY};oa@+RsVBN;H8+|6Abmv1D**K172J8SB? z3ckq-Ag8ov3@nBd@T!!`kD9ldoyLV%Y4X8|^ghbuv=^^tx(_ho|c}Yx^{CgS#C%)fHv+ z0JERvvPUI+CO{L*U>NrL)i^t%cBYi;BRr71VQS2Q#UWm-)amV@SXtd$g#WO9x4Np& zy_=6$Zc--N&&R&a-F2|2YFZ;r^rCO&;N(_ONup*|guqSi&wrxOE z-<4C^qkQtK;4n=}dH%Ub3S=JfDa?iBMdclI-&#swyMVK|op7nRadDHj?!3TFp_2kd b9s#KE->+VO#_jp{nBUj^FWLjzN2C7 Date: Thu, 8 Jan 2026 10:29:52 +0000 Subject: [PATCH 3/3] fix: rename files and update code --- .../build-in/Classification/ResNetV1bV1_5/{readme => readme.md} | 0 .../ResNetV1bV1_5/{requirements_exact.txt => requirements.txt} | 0 2 files changed, 0 insertions(+), 0 deletions(-) rename PyTorch/build-in/Classification/ResNetV1bV1_5/{readme => readme.md} (100%) rename PyTorch/build-in/Classification/ResNetV1bV1_5/{requirements_exact.txt => requirements.txt} (100%) diff --git a/PyTorch/build-in/Classification/ResNetV1bV1_5/readme b/PyTorch/build-in/Classification/ResNetV1bV1_5/readme.md similarity index 100% rename from PyTorch/build-in/Classification/ResNetV1bV1_5/readme rename to PyTorch/build-in/Classification/ResNetV1bV1_5/readme.md diff --git a/PyTorch/build-in/Classification/ResNetV1bV1_5/requirements_exact.txt b/PyTorch/build-in/Classification/ResNetV1bV1_5/requirements.txt similarity index 100% rename from 
PyTorch/build-in/Classification/ResNetV1bV1_5/requirements_exact.txt rename to PyTorch/build-in/Classification/ResNetV1bV1_5/requirements.txt