# configs/evaluation/projects/bridging/metrics_coco_ccrop.yaml

# @package _global_
# Evaluate center crop masks on COCO.

# Hydra defaults list: configs are composed top-to-bottom, with later entries
# overriding earlier ones; `_self_` last means keys in THIS file take
# precedence over everything merged in above.
# The trailing `(N)!` markers are mkdocs-material documentation annotations
# inside comments — they have no effect on the config.
defaults:
  - /evaluation_config  # (1)!
  - /evaluation/projects/bridging/_base_metrics # (2)!
  - /evaluation/projects/bridging/_preprocessing_coco # (3)!
  - /evaluation/projects/bridging/_metrics_discovery_masks # (4)!
  - /evaluation/projects/bridging/_metrics_segmentation # (5)!
  - /dataset: coco_nocrowd  # (6)!
  - _self_

# Batch size used during evaluation.
eval_batch_size: 16

plugins:
  03b_preprocessing:
    evaluation_transforms:
      # Image pipeline: PIL -> tensor in [0, 1], bicubic resize so the short
      # side is 224, clamp (bicubic interpolation can overshoot [0, 1]),
      # center-crop to 224x224, then normalize with ImageNet mean/std.
      # NOTE: order matters — clamp must come after Resize and Normalize last.
      image:
        _target_: torchvision.transforms.Compose
        transforms:
          - _target_: torchvision.transforms.ToTensor
          - _target_: torchvision.transforms.Resize
            size: 224
            interpolation: ${torchvision_interpolation_mode:BICUBIC}
          - "${lambda_fn:'lambda image: image.clamp(0.0, 1.0)'}"
          - _target_: torchvision.transforms.CenterCrop
            size: 224
          - _target_: torchvision.transforms.Normalize
            mean: [0.485, 0.456, 0.406]
            std: [0.229, 0.224, 0.225]
      # Mask pipelines: nearest-exact resize (no interpolation across mask
      # labels) to 320, then a 320x320 center crop — the same crop region as
      # the image, at a higher resolution.
      # NOTE(review): 320 here vs 224 for the image looks intentional
      # (metrics evaluated on higher-resolution masks) — confirm against the
      # metric configs merged in via the defaults list.
      instance_mask:
        _target_: torchvision.transforms.Compose
        transforms:
          - _target_: ocl.preprocessing.DenseMaskToTensor
          - _target_: ocl.preprocessing.ResizeNearestExact
            size: 320
          - _target_: torchvision.transforms.CenterCrop
            size: 320
      # Same pipeline as instance_mask, applied to semantic segmentation masks.
      segmentation_mask:
        _target_: torchvision.transforms.Compose
        transforms:
          - _target_: ocl.preprocessing.DenseMaskToTensor
          - _target_: ocl.preprocessing.ResizeNearestExact
            size: 320
          - _target_: torchvision.transforms.CenterCrop
            size: 320
# Annotation footnotes (from the rendered documentation page, kept as comments):
# 1. /evaluation_config
# 2. /evaluation/projects/bridging/_base_metrics
# 3. /evaluation/projects/bridging/_preprocessing_coco
# 4. /evaluation/projects/bridging/_metrics_discovery_masks
# 5. /evaluation/projects/bridging/_metrics_segmentation
# 6. /dataset/coco_nocrowd