Commits

Miran Levar committed 1d0c8dc

Added icons to widgets.

Comments (0)

Files changed (14)

 recursive-include _multitarget/datasets *
+recursive-include _multitarget/widgets/icons *

_multitarget/widgets/OWBinaryRelevance.py

 <description>Binary relevance learner wrapper</description>
 <priority>1300</priority>
 <tags>multitarget,binary,relevance,wrapper</tags>
+<icon>icons/BinaryRelevance.png</icon>
 
 """
 

_multitarget/widgets/OWClassifierChain.py

 support multitarget problems</description>
 <priority>1100</priority>
 <tags>wrapper,multitarget,chain</tags>
+<icon>icons/ClassifierChain.png</icon>
 """
 
 import Orange

_multitarget/widgets/OWClusteringTree.py

 target classification</description>
 <priority>100</priority>
 <tags>tree,multitarget</tags>
+<icon>icons/ClusteringTree.png</icon>
 
 """
 

_multitarget/widgets/OWEnsembleClassifierChain.py

 <description>Train an ensemble of chain classifiers</description>
 <priority>1200</priority>
 <tags>wrapper,multitarget,chain,ensemble</tags>
+<icon>icons/EnsembleClassifierChain.png</icon>
 """
 
 import Orange

_multitarget/widgets/OWNeuralNetwork.py

 <category>Multitarget</category>
 <priority>50</priority>
 <tags>neural,network,multitarget</tags>
-
+<icon>icons/NeuralNetwork.png</icon>
 
 """
 

_multitarget/widgets/OWTestMultitargetLearners.py

-"""<name>Test Multitarget Learners</name>
+"""
+<icon>icons/TestMTLearners.png</icon>
+<name>Test Multitarget Learners</name>
 <description>A widget for scoring the performance of learning algorithms
 on multitarget domains</description>
 <priority>1000</priority>
                 res, Orange.evaluation.scoring.logloss)
 
 
+def avg_is(res):
+    return Orange.multitarget.scoring.mt_average_score(
+                res, Orange.evaluation.scoring.IS)
+
+
+def flat_is(res):
+    return Orange.multitarget.scoring.mt_flattened_score(
+                res, Orange.evaluation.scoring.IS)
+
+
+def avg_bs(res):
+    return Orange.multitarget.scoring.mt_average_score(
+                res, Orange.evaluation.scoring.Brier_score)
+
+
+def flat_bs(res):
+    return Orange.multitarget.scoring.mt_flattened_score(
+                res, Orange.evaluation.scoring.Brier_score)
+
+
 def avg_rmse(res):
     return Orange.multitarget.scoring.mt_average_score(
                 res, Orange.evaluation.scoring.RMSE)
     cStatistics = \
         [Score(*s) for s in
          [("Average Logloss", "Logloss (average)", avg_logloss, True),
-          ("Flatten Logloss", "Logloss (flatten)", flat_logloss, True),
+          ("Flattened Logloss", "Logloss (flattened)", flat_logloss, False),
           ("Global Accuracy", "Global Accuracy",
            Orange.multitarget.scoring.mt_global_accuracy, True),
           ("Mean Accuracy", "Mean Accuracy",
            Orange.multitarget.scoring.mt_mean_accuracy, True),
+          ("Average Information Score", "Inf. Score (average)", avg_is, True),
+          ("Flattened Information Score", "Inf. Score (flattened)", flat_is, False),
+          ("Average Brier Score", "Brier (average)", avg_bs, True),
+          ("Flattened Brier Score", "Brier (flattened)", flat_bs, False),
           ("F1 macro", "F1 macro",
            Orange.evaluation.scoring.mlc_F1_macro, False),
           ("F1 micro", "F1 micro",
     rStatistics = \
         [Score(*s) for s in
          [("Average RMSE", "RMSE (average)", avg_rmse, True),
-          ("Flatten RMSE", "RMSE (flatten)", flat_rmse, True)]
+          ("Flattened RMSE", "RMSE (flattened)", flat_rmse, True)]
          ]
 
     def __init__(self, parent=None, signalManager=None,

_multitarget/widgets/icons/BinaryRelevance.png

Added
New image

_multitarget/widgets/icons/ClassifierChain.png

Added
New image

_multitarget/widgets/icons/ClusteringTree.png

Added
New image

_multitarget/widgets/icons/EnsembleClassifierChain.png

Added
New image

_multitarget/widgets/icons/NeuralNetwork.png

Added
New image

_multitarget/widgets/icons/TestMTLearners.png

Added
New image

_multitarget/widgets/icons/TestMTLearners_2.png

Added
New image