From 5438a966bcf0de666bcbc7e594dad1f4b1b03272 Mon Sep 17 00:00:00 2001
From: ManasBommakanti
Date: Sat, 25 Feb 2023 16:17:01 -0500
Subject: [PATCH 1/7] Added function to compute mean IOU for all classes

---
 official/vision/evaluation/iou.py | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/official/vision/evaluation/iou.py b/official/vision/evaluation/iou.py
index 1dabd4af..b0616b31 100644
--- a/official/vision/evaluation/iou.py
+++ b/official/vision/evaluation/iou.py
@@ -119,6 +119,10 @@ def result(self):
 
     return tf.math.divide_no_nan(true_positives, denominator)
 
+  def get_miou(self):
+    """Computes the mean intersection-over-union over all classes."""
+    return self.result().numpy().mean()
+
   def reset_states(self):
     tf.keras.backend.set_value(
         self.total_cm, np.zeros((self.num_classes, self.num_classes)))
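
A minimal usage sketch of the new get_miou() helper follows. It assumes the metric class in iou.py that owns the result() method above is PerClassIoU (the class name is not visible in this hunk), and that labels and predictions are class indices, as with tf.keras.metrics.MeanIoU:

    import tensorflow as tf
    from official.vision.evaluation import iou

    metric = iou.PerClassIoU(num_classes=3)
    metric.update_state(
        y_true=tf.constant([0, 1, 2, 1]),
        y_pred=tf.constant([0, 1, 1, 1]))

    per_class_iou = metric.result()  # vector with one IoU value per class
    mean_iou = metric.get_miou()     # scalar: that vector averaged with .mean()

Note that get_miou() averages over all num_classes entries, including classes whose IoU comes out as 0 because divide_no_nan saw an empty union, so its value can differ from tf.keras.metrics.MeanIoU, which averages only over classes that actually appear.
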
+ +"""Metrics involving the Model""" + +import numpy as np +import tensorflow as tf + +class ModelAnalysis: + """Computes metrics including the number of parameters for a model, the FLOPS (Floating Point Operations per Second) + and activations""" + + def __init__(self, model): + """ + Args: + model: A reference to a tf.keras model + """ + self.model = model + + def get_parameters(self): + """ + + Returns: a number corresponding to the parameter count of trainable and un-trainable variables within a model + + """ + return self.model.count_params() + From 095972bda8157453264630b1deeb2840afdc487c Mon Sep 17 00:00:00 2001 From: ManasBommakanti Date: Sun, 26 Feb 2023 22:54:22 -0500 Subject: [PATCH 3/7] Added functionality to get the total activations in the model --- official/vision/evaluation/analyze_model.py | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/official/vision/evaluation/analyze_model.py b/official/vision/evaluation/analyze_model.py index 300e2f91..34fdaca0 100644 --- a/official/vision/evaluation/analyze_model.py +++ b/official/vision/evaluation/analyze_model.py @@ -36,3 +36,20 @@ def get_parameters(self): """ return self.model.count_params() + def get_num_activations(self, model): + """ + Args: + model: A reference to a tf.keras model + Returns: a number corresponding to the number of activations of the layers within the model + """ + total_activations = 0 + + # Iterates through each layer + for layer in model.layers: + # Checks if the layer is not a flattening or dense layer + if len(layer.output_shape) == 4: + print(layer.output_shape) + total_activations += (layer.output_shape[1] * layer.output_shape[2] * layer.output_shape[3]) + + return total_activations + From b17f5baf240895af288972ca114bbde1e201d709 Mon Sep 17 00:00:00 2001 From: VisvShah Date: Tue, 21 Mar 2023 16:50:00 -0400 Subject: [PATCH 4/7] Wrote Parameters Testing File --- .../vision/evaluation/analyze_params_test.py | 42 +++++++++++++++++++ 1 file changed, 42 insertions(+) create mode 100644 official/vision/evaluation/analyze_params_test.py diff --git a/official/vision/evaluation/analyze_params_test.py b/official/vision/evaluation/analyze_params_test.py new file mode 100644 index 00000000..34d768fc --- /dev/null +++ b/official/vision/evaluation/analyze_params_test.py @@ -0,0 +1,42 @@ +import tensorflow as tf +import tensorflow.keras as ks +from official.vision.evaluation import analyze_model + +class paramaterTesting(tf.test.TestCase): + def __init__(self): + self = self + def test_model1(self): + #Tensorflow Model 1 + input_shape = (28, 28, 1) + tfModel = ks.Sequential() + tfModel.add(ks.layers.Conv2D(28, kernel_size=(3,3), input_shape=input_shape)) + tfModel.add(ks.layers.MaxPooling2D(pool_size=(2, 2))) + tfModel.add(ks.layers.Flatten()) # Flattening the 2D arrays for fully connected layers + tfModel.add(ks.layers.Dense(128, activation=tf.nn.relu)) + tfModel.add(ks.layers.Dropout(0.2)) + tfModel.add(ks.layers.Dense(10,activation=tf.nn.softmax)) + ma = analyze_model.ModelAnalysis(tfModel) + result = ma.get_parameters() + expected_result = 600000 + self.assertAllClose(expected_result, result, atol=1e4) + #Pytorch Model that the Tensorflow Model is tested against + # class NeuralNet(nn.Module): + # def __init__(self): + # super(NeuralNet, self).__init__() + # self.conv = nn.Conv2d(1, 28, kernel_size=3) + # self.pool = nn.MaxPool2d(2) + # self.hidden= nn.Linear(28*13*13, 128) + # self.drop = nn.Dropout(0.2) + # self.out = nn.Linear(128, 10) + # self.act = nn.ReLU() + # def forward(self, x): + # x 
From b17f5baf240895af288972ca114bbde1e201d709 Mon Sep 17 00:00:00 2001
From: VisvShah
Date: Tue, 21 Mar 2023 16:50:00 -0400
Subject: [PATCH 4/7] Wrote Parameters Testing File

---
 .../vision/evaluation/analyze_params_test.py | 42 +++++++++++++++++++
 1 file changed, 42 insertions(+)
 create mode 100644 official/vision/evaluation/analyze_params_test.py

diff --git a/official/vision/evaluation/analyze_params_test.py b/official/vision/evaluation/analyze_params_test.py
new file mode 100644
index 00000000..34d768fc
--- /dev/null
+++ b/official/vision/evaluation/analyze_params_test.py
@@ -0,0 +1,42 @@
+import tensorflow as tf
+import tensorflow.keras as ks
+from official.vision.evaluation import analyze_model
+
+class ParameterTest(tf.test.TestCase):
+
+  def test_model1(self):
+    # TensorFlow model 1.
+    input_shape = (28, 28, 1)
+    tfModel = ks.Sequential()
+    tfModel.add(ks.layers.Conv2D(28, kernel_size=(3, 3), input_shape=input_shape))
+    tfModel.add(ks.layers.MaxPooling2D(pool_size=(2, 2)))
+    tfModel.add(ks.layers.Flatten())  # Flatten the 2D feature maps for the fully connected layers.
+    tfModel.add(ks.layers.Dense(128, activation=tf.nn.relu))
+    tfModel.add(ks.layers.Dropout(0.2))
+    tfModel.add(ks.layers.Dense(10, activation=tf.nn.softmax))
+    ma = analyze_model.ModelAnalysis(tfModel)
+    result = ma.get_parameters()
+    expected_result = 600000
+    self.assertAllClose(expected_result, result, atol=1e4)
+    # PyTorch model that the TensorFlow model is tested against:
+    # class NeuralNet(nn.Module):
+    #   def __init__(self):
+    #     super(NeuralNet, self).__init__()
+    #     self.conv = nn.Conv2d(1, 28, kernel_size=3)
+    #     self.pool = nn.MaxPool2d(2)
+    #     self.hidden = nn.Linear(28 * 13 * 13, 128)
+    #     self.drop = nn.Dropout(0.2)
+    #     self.out = nn.Linear(128, 10)
+    #     self.act = nn.ReLU()
+    #   def forward(self, x):
+    #     x = self.act(self.conv(x))    # [batch_size, 28, 26, 26]
+    #     x = self.pool(x)              # [batch_size, 28, 13, 13]
+    #     x = x.view(x.size(0), -1)     # [batch_size, 28*13*13=4732]
+    #     x = self.act(self.hidden(x))  # [batch_size, 128]
+    #     x = self.drop(x)
+    #     x = self.out(x)               # [batch_size, 10]
+    #     return x
+    # pytorchModel = NeuralNet()
+
+if __name__ == '__main__':
+  tf.test.main()
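
For reference, the expected_result of 600000 with atol=1e4 in test_model1 brackets the exact parameter count of the Keras model it builds (pooling and dropout layers add no parameters):

    Conv2D(28, 3x3, 1 input channel): 3*3*1*28 + 28 =     280
    Dense(128) on 13*13*28 = 4732 flattened units:
        4732*128 + 128                              = 605,824
    Dense(10): 128*10 + 10                          =   1,290
    Total                                           = 607,394

which is within the 1e4 tolerance of 600000, and matches the commented-out PyTorch reference model (nn.Linear(28*13*13, 128) and nn.Linear(128, 10)) layer for layer.
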
From cb2333d693547030dbec9ded6616502f5dd52ff8 Mon Sep 17 00:00:00 2001
From: VisvShah
Date: Wed, 22 Mar 2023 23:22:54 -0400
Subject: [PATCH 5/7] Update panoptic_quality_evaluator.py

- Added specific overall pq, sq, rq functions
---
 .../evaluation/panoptic_quality_evaluator.py | 18 ++++++++++++++++++
 1 file changed, 18 insertions(+)

diff --git a/official/vision/evaluation/panoptic_quality_evaluator.py b/official/vision/evaluation/panoptic_quality_evaluator.py
index ebaaa6ce..2c766571 100644
--- a/official/vision/evaluation/panoptic_quality_evaluator.py
+++ b/official/vision/evaluation/panoptic_quality_evaluator.py
@@ -98,6 +98,24 @@ def result(self):
     self.reset_states()
     return results
 
+  def overall_pq(self):
+    """Evaluates overall_pq"""
+    results = self._pq_metric_module.result(self._is_thing)
+    self.reset_states()
+    return results["All_pq"]
+
+  def overall_sq(self):
+    """Evaluates overall_sq"""
+    results = self._pq_metric_module.result(self._is_thing)
+    self.reset_states()
+    return results["All_sq"]
+
+  def overall_rq(self):
+    """Evaluates overall_rq"""
+    results = self._pq_metric_module.result(self._is_thing)
+    self.reset_states()
+    return results["All_rq"]
+
   def _convert_to_numpy(self, groundtruths, predictions):
     """Converts tesnors to numpy arrays."""
     if groundtruths:

From 4c3a9b0004485d6d217f4f24041f2055fd896ec1 Mon Sep 17 00:00:00 2001
From: ManasBommakanti
Date: Sat, 25 Mar 2023 16:00:14 -0400
Subject: [PATCH 6/7] Added functions to return PQ, SQ, and RQ separately

---
 official/vision/evaluation/panoptic_quality_evaluator.py | 8 ++------
 1 file changed, 2 insertions(+), 6 deletions(-)

diff --git a/official/vision/evaluation/panoptic_quality_evaluator.py b/official/vision/evaluation/panoptic_quality_evaluator.py
index 2c766571..c95e860e 100644
--- a/official/vision/evaluation/panoptic_quality_evaluator.py
+++ b/official/vision/evaluation/panoptic_quality_evaluator.py
@@ -93,31 +93,27 @@ def reset_states(self):
     self._pq_metric_module.reset()
 
   def result(self):
-    """Evaluates detection results, and reset_states."""
+    """Evaluates detection results"""
     results = self._pq_metric_module.result(self._is_thing)
-    self.reset_states()
     return results
 
   def overall_pq(self):
     """Evaluates overall_pq"""
     results = self._pq_metric_module.result(self._is_thing)
-    self.reset_states()
     return results["All_pq"]
 
   def overall_sq(self):
     """Evaluates overall_sq"""
     results = self._pq_metric_module.result(self._is_thing)
-    self.reset_states()
     return results["All_sq"]
 
   def overall_rq(self):
     """Evaluates overall_rq"""
    results = self._pq_metric_module.result(self._is_thing)
-    self.reset_states()
     return results["All_rq"]
 
   def _convert_to_numpy(self, groundtruths, predictions):
-    """Converts tesnors to numpy arrays."""
+    """Converts tensors to numpy arrays."""
     if groundtruths:
       labels = tf.nest.map_structure(lambda x: x.numpy(), groundtruths)
       numpy_groundtruths = {}

From 9eb9c564561263bf061a9d6df61a50f3ed4e2181 Mon Sep 17 00:00:00 2001
From: ManasBommakanti
Date: Sat, 25 Mar 2023 16:02:13 -0400
Subject: [PATCH 7/7] Added lines to test PQ, SQ, and RQ functions

---
 official/vision/evaluation/panoptic_quality_evaluator_test.py | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/official/vision/evaluation/panoptic_quality_evaluator_test.py b/official/vision/evaluation/panoptic_quality_evaluator_test.py
index b9d1454d..a836b162 100644
--- a/official/vision/evaluation/panoptic_quality_evaluator_test.py
+++ b/official/vision/evaluation/panoptic_quality_evaluator_test.py
@@ -89,6 +89,9 @@ def test_multiple_batches(self):
     self.assertAlmostEqual(results['All_pq'], 0.63177083)
     self.assertAlmostEqual(results['All_rq'], 0.75)
     self.assertAlmostEqual(results['All_sq'], 0.84236111)
+    self.assertAlmostEqual(pq_evaluator.overall_pq(), 0.63177083)
+    self.assertAlmostEqual(pq_evaluator.overall_rq(), 0.75)
+    self.assertAlmostEqual(pq_evaluator.overall_sq(), 0.84236111)
     self.assertEqual(results['All_num_categories'], 1)
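
Taken together, patches 5-7 allow the flow sketched below. The evaluator construction and the input dictionaries are elided because their exact arguments are not shown in these patches, and `batches` is a placeholder iterable; the sketch assumes the evaluator is fed through its update_state(groundtruths, predictions) method, which the _convert_to_numpy helper above serves:

    # pq_evaluator is a configured PanopticQualityEvaluator; every batch of
    # groundtruths/predictions is accumulated before any results are read.
    for groundtruths, predictions in batches:
      pq_evaluator.update_state(groundtruths, predictions)

    results = pq_evaluator.result()  # full metrics dict; no longer resets state (patch 6)
    pq = pq_evaluator.overall_pq()   # equals results['All_pq']
    sq = pq_evaluator.overall_sq()   # equals results['All_sq']
    rq = pq_evaluator.overall_rq()   # equals results['All_rq']
    pq_evaluator.reset_states()      # reset explicitly before the next evaluation run

Each overall_* accessor recomputes the full result dictionary from the underlying metric module, so callers that need all three values may prefer a single result() call.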