Configuring Metrics¶
This document describes how to configure the different types of metrics: accuracy metrics, latency metrics, and custom metrics.
Accuracy Metric¶
{
"name": "accuracy",
"type": "accuracy",
"sub_type": "accuracy_score",
"user_config": {
"post_processing_func": "post_process",
"user_script": "user_script.py",
"dataloader_func": "create_dataloader",
"batch_size": 1
},
"goal": {
"type": "max-degradation",
"value": 0.01
}
}
from olive.evaluator.metric import AccuracySubType, Metric, MetricType
accuracy_metric = Metric(
name="accuracy",
type=MetricType.ACCURACY,
sub_type=AccuracySubType.ACCURACY_SCORE,
user_config={
"user_script": "user_script.py",
"post_processing_func": "post_process",
"dataloader_func": "create_dataloader",
"batch_size": 1,
},
goal={"type": "max-degradation", "value": 0.01}
)
Please refer to this example for "user_script.py".
Latency Metric¶
{
"name": "latency",
"type": "latency",
"sub_type": "avg",
"user_config": {
"user_script": "user_script.py",
"dataloader_func": "create_dataloader",
"batch_size": 1
},
"goal": {
"type": "percent-min-improvement",
"value": 20
}
}
from olive.evaluator.metric import LatencySubType, Metric, MetricType
latency_metric = Metric(
name="latency",
type=MetricType.LATENCY,
sub_type=LatencySubType.AVG,
user_config={
"user_script": user_script,
"dataloader_func": "create_dataloader",
"batch_size": 1,
},
goal={"type": "percent-min-improvement", "value": 20},
)
Please refer to this example for "user_script.py".
Custom Metric¶
{
"name": "accuracy",
"type": "custom",
"user_config": {
"user_script": "user_script.py",
"data_dir": "data",
"batch_size": 16,
"evaluate_func": "eval_accuracy",
},
"goal": {
"type": "max-degradation",
"value": 0.01
}
}
from olive.evaluator.metric import Metric, MetricType
accuracy_metric = Metric(
name="accuracy",
type=MetricType.CUSTOM,
higher_is_better=True,
user_config={
"user_script": "user_script.py",
"data_dir": "data",
"batch_size": 16,
"evaluate_func": "eval_accuracy",
},
goal={"type": "max-degradation", "value": 0.01},
)
Please refer to this example for "user_script.py".