Running eval script results in empty output directory #199

Open
Terry-Kusunoki-Martin opened this issue Feb 13, 2019 · 2 comments

Terry-Kusunoki-Martin commented Feb 13, 2019

After training finished, I ran this command:

python tools/test_net.py --dataset my_dataset --cfg configs/baselines/e2e_faster_rcnn_R-50-C4_1x.yaml --load_ckpt Outputs/e2e_faster_rcnn_R-50-C4_1x/Feb06-15-50-25_k2_step/ --multi-gpu-testing

This creates a directory at {the/parent/dir/of/checkpoint}/test, but the "test" directory itself is empty. I believe the script should be writing a .pkl file there, so is there any reason why it might be empty? The problem still occurs when I explicitly specify the output directory.
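
For what it's worth, here is a small sanity-check sketch I put together. The paths are just copied from the command above, and the *.pth naming and the location of the test directory are assumptions from my own setup, not something the script guarantees; it simply lists whatever checkpoint files sit under the path passed to --load_ckpt and whatever ends up in the test directory after inference:

```python
import glob
import os

# Copied from the command above; adjust to your own run directory.
ckpt_arg = 'Outputs/e2e_faster_rcnn_R-50-C4_1x/Feb06-15-50-25_k2_step/'

if os.path.isdir(ckpt_arg):
    # --load_ckpt is being given a directory here; list any .pth files under
    # it to see whether a concrete checkpoint file should be passed instead.
    found = sorted(glob.glob(os.path.join(ckpt_arg, '**', '*.pth'), recursive=True))
    print('checkpoint files under that directory:', found)
else:
    print('checkpoint file exists:', os.path.isfile(ckpt_arg))

# Point this at wherever the script reported creating its "test" directory,
# then list its contents to confirm whether any .pkl was written at all.
test_dir = os.path.join(ckpt_arg.rstrip('/'), 'test')
print('test dir contents:',
      os.listdir(test_dir) if os.path.isdir(test_dir) else '(missing)')
```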

Here is the full terminal output from running the eval command:

INFO test_net.py:  68: Called with args:
INFO test_net.py:  69: Namespace(cfg_file='configs/baselines/e2e_faster_rcnn_R-50-C4_1x.yaml', dataset='my_dataset', load_ckpt='Outputs/e2e_faster_rcnn_R-50-C4_1x/Feb06-15-50-25_k2_step/', load_detectron=None, multi_gpu_testing=True, output_dir='/home/tkmartin/Detectron.pytorch', range=None, set_cfgs=[], vis=False)
INFO test_net.py: 103: Testing with config:
INFO test_net.py: 104: {'BBOX_XFORM_CLIP': 4.135166556742356,
 'CROP_RESIZE_WITH_MAX_POOL': True,
 'CUDA': False,
 'DATA_DIR': '/home/tkmartin/Detectron.pytorch/data',
 'DATA_LOADER': {'NUM_THREADS': 4},
 'DEBUG': False,
 'DEDUP_BOXES': 0.0625,
 'EPS': 1e-14,
 'EXPECTED_RESULTS': [],
 'EXPECTED_RESULTS_ATOL': 0.005,
 'EXPECTED_RESULTS_EMAIL': '',
 'EXPECTED_RESULTS_RTOL': 0.1,
 'FAST_RCNN': {'CONV_HEAD_DIM': 256,
               'MLP_HEAD_DIM': 1024,
               'NUM_STACKED_CONVS': 4,
               'ROI_BOX_HEAD': 'ResNet.ResNet_roi_conv5_head',
               'ROI_XFORM_METHOD': 'RoIAlign',
               'ROI_XFORM_RESOLUTION': 14,
               'ROI_XFORM_SAMPLING_RATIO': 0},
 'FPN': {'COARSEST_STRIDE': 32,
         'DIM': 256,
         'EXTRA_CONV_LEVELS': False,
         'FPN_ON': False,
         'MULTILEVEL_ROIS': False,
         'MULTILEVEL_RPN': False,
         'ROI_CANONICAL_LEVEL': 4,
         'ROI_CANONICAL_SCALE': 224,
         'ROI_MAX_LEVEL': 5,
         'ROI_MIN_LEVEL': 2,
         'RPN_ANCHOR_START_SIZE': 32,
         'RPN_ASPECT_RATIOS': (0.5, 1, 2),
         'RPN_COLLECT_SCALE': 1,
         'RPN_MAX_LEVEL': 6,
         'RPN_MIN_LEVEL': 2,
         'USE_GN': False,
         'ZERO_INIT_LATERAL': False},
 'GROUP_NORM': {'DIM_PER_GP': -1, 'EPSILON': 1e-05, 'NUM_GROUPS': 32},
 'KRCNN': {'CONV_HEAD_DIM': 256,
           'CONV_HEAD_KERNEL': 3,
           'CONV_INIT': 'GaussianFill',
           'DECONV_DIM': 256,
           'DECONV_KERNEL': 4,
           'DILATION': 1,
           'HEATMAP_SIZE': -1,
           'INFERENCE_MIN_SIZE': 0,
           'KEYPOINT_CONFIDENCE': 'bbox',
           'LOSS_WEIGHT': 1.0,
           'MIN_KEYPOINT_COUNT_FOR_VALID_MINIBATCH': 20,
           'NMS_OKS': False,
           'NORMALIZE_BY_VISIBLE_KEYPOINTS': True,
           'NUM_KEYPOINTS': -1,
           'NUM_STACKED_CONVS': 8,
           'ROI_KEYPOINTS_HEAD': '',
           'ROI_XFORM_METHOD': 'RoIAlign',
           'ROI_XFORM_RESOLUTION': 7,
           'ROI_XFORM_SAMPLING_RATIO': 0,
           'UP_SCALE': -1,
           'USE_DECONV': False,
           'USE_DECONV_OUTPUT': False},
 'MATLAB': 'matlab',
 'MODEL': {'BBOX_REG_WEIGHTS': (10.0, 10.0, 5.0, 5.0),
           'CLS_AGNOSTIC_BBOX_REG': False,
           'CONV_BODY': 'ResNet.ResNet50_conv4_body',
           'FASTER_RCNN': True,
           'KEYPOINTS_ON': False,
           'LOAD_IMAGENET_PRETRAINED_WEIGHTS': True,
           'MASK_ON': False,
           'NUM_CLASSES': 2,
           'RPN_ONLY': False,
           'SHARE_RES5': False,
           'TYPE': 'generalized_rcnn',
           'UNSUPERVISED_POSE': False},
 'MRCNN': {'CLS_SPECIFIC_MASK': True,
           'CONV_INIT': 'GaussianFill',
           'DILATION': 2,
           'DIM_REDUCED': 256,
           'MEMORY_EFFICIENT_LOSS': True,
           'RESOLUTION': 14,
           'ROI_MASK_HEAD': '',
           'ROI_XFORM_METHOD': 'RoIAlign',
           'ROI_XFORM_RESOLUTION': 7,
           'ROI_XFORM_SAMPLING_RATIO': 0,
           'THRESH_BINARIZE': 0.5,
           'UPSAMPLE_RATIO': 1,
           'USE_FC_OUTPUT': False,
           'WEIGHT_LOSS_MASK': 1.0},
 'NUM_GPUS': 8,
 'OUTPUT_DIR': 'Outputs',
 'PIXEL_MEANS': array([[[102.9801, 115.9465, 122.7717]]]),
 'POOLING_MODE': 'crop',
 'POOLING_SIZE': 7,
 'PYTORCH_VERSION_LESS_THAN_040': False,
 'RESNETS': {'FREEZE_AT': 2,
             'IMAGENET_PRETRAINED_WEIGHTS': 'data/pretrained_model/resnet50_caffe.pth',
             'NUM_GROUPS': 1,
             'RES5_DILATION': 1,
             'SHORTCUT_FUNC': 'basic_bn_shortcut',
             'STEM_FUNC': 'basic_bn_stem',
             'STRIDE_1X1': True,
             'TRANS_FUNC': 'bottleneck_transformation',
             'USE_GN': False,
             'WIDTH_PER_GROUP': 64},
 'RETINANET': {'ANCHOR_SCALE': 4,
               'ASPECT_RATIOS': (0.5, 1.0, 2.0),
               'BBOX_REG_BETA': 0.11,
               'BBOX_REG_WEIGHT': 1.0,
               'CLASS_SPECIFIC_BBOX': False,
               'INFERENCE_TH': 0.05,
               'LOSS_ALPHA': 0.25,
               'LOSS_GAMMA': 2.0,
               'NEGATIVE_OVERLAP': 0.4,
               'NUM_CONVS': 4,
               'POSITIVE_OVERLAP': 0.5,
               'PRE_NMS_TOP_N': 1000,
               'PRIOR_PROB': 0.01,
               'RETINANET_ON': False,
               'SCALES_PER_OCTAVE': 3,
               'SHARE_CLS_BBOX_TOWER': False,
               'SOFTMAX': False},
 'RFCN': {'PS_GRID_SIZE': 3},
 'RNG_SEED': 3,
 'ROOT_DIR': '/home/tkmartin/Detectron.pytorch',
 'RPN': {'ASPECT_RATIOS': (0.5, 1, 2),
         'CLS_ACTIVATION': 'sigmoid',
         'OUT_DIM': 512,
         'OUT_DIM_AS_IN_DIM': True,
         'RPN_ON': True,
         'SIZES': (32, 64, 128, 256, 512),
         'STRIDE': 16},
 'SOLVER': {'BASE_LR': 0.01,
            'BIAS_DOUBLE_LR': True,
            'BIAS_WEIGHT_DECAY': False,
            'GAMMA': 0.1,
            'LOG_LR_CHANGE_THRESHOLD': 1.1,
            'LRS': [],
            'LR_POLICY': 'steps_with_decay',
            'MAX_ITER': 180000,
            'MOMENTUM': 0.9,
            'SCALE_MOMENTUM': True,
            'SCALE_MOMENTUM_THRESHOLD': 1.1,
            'STEPS': [0, 120000, 160000],
            'STEP_SIZE': 30000,
            'TYPE': 'SGD',
            'WARM_UP_FACTOR': 0.3333333333333333,
            'WARM_UP_ITERS': 500,
            'WARM_UP_METHOD': 'linear',
            'WEIGHT_DECAY': 0.0001,
            'WEIGHT_DECAY_GN': 0.0},
 'TEST': {'BBOX_AUG': {'AREA_TH_HI': 32400,
                       'AREA_TH_LO': 2500,
                       'ASPECT_RATIOS': (),
                       'ASPECT_RATIO_H_FLIP': False,
                       'COORD_HEUR': 'UNION',
                       'ENABLED': False,
                       'H_FLIP': False,
                       'MAX_SIZE': 4000,
                       'SCALES': (),
                       'SCALE_H_FLIP': False,
                       'SCALE_SIZE_DEP': False,
                       'SCORE_HEUR': 'UNION'},
          'BBOX_REG': True,
          'BBOX_VOTE': {'ENABLED': False,
                        'SCORING_METHOD': 'ID',
                        'SCORING_METHOD_BETA': 1.0,
                        'VOTE_TH': 0.8},
          'COMPETITION_MODE': True,
          'DATASETS': (),
          'DETECTIONS_PER_IM': 100,
          'FORCE_JSON_DATASET_EVAL': False,
          'KPS_AUG': {'AREA_TH': 32400,
                      'ASPECT_RATIOS': (),
                      'ASPECT_RATIO_H_FLIP': False,
                      'ENABLED': False,
                      'HEUR': 'HM_AVG',
                      'H_FLIP': False,
                      'MAX_SIZE': 4000,
                      'SCALES': (),
                      'SCALE_H_FLIP': False,
                      'SCALE_SIZE_DEP': False},
          'MASK_AUG': {'AREA_TH': 32400,
                       'ASPECT_RATIOS': (),
                       'ASPECT_RATIO_H_FLIP': False,
                       'ENABLED': False,
                       'HEUR': 'SOFT_AVG',
                       'H_FLIP': False,
                       'MAX_SIZE': 4000,
                       'SCALES': (),
                       'SCALE_H_FLIP': False,
                       'SCALE_SIZE_DEP': False},
          'MAX_SIZE': 1333,
          'NMS': 0.5,
          'PRECOMPUTED_PROPOSALS': False,
          'PROPOSAL_FILES': (),
          'PROPOSAL_LIMIT': 2000,
          'RPN_MIN_SIZE': 0,
          'RPN_NMS_THRESH': 0.7,
          'RPN_POST_NMS_TOP_N': 1000,
          'RPN_PRE_NMS_TOP_N': 6000,
          'SCALE': 800,
          'SCORE_THRESH': 0.05,
          'SOFT_NMS': {'ENABLED': False, 'METHOD': 'linear', 'SIGMA': 0.5}},
 'TRAIN': {'ASPECT_CROPPING': False,
           'ASPECT_GROUPING': True,
           'ASPECT_HI': 2,
           'ASPECT_LO': 0.5,
           'BATCH_SIZE_PER_IM': 512,
           'BBOX_INSIDE_WEIGHTS': (1.0, 1.0, 1.0, 1.0),
           'BBOX_NORMALIZE_MEANS': (0.0, 0.0, 0.0, 0.0),
           'BBOX_NORMALIZE_STDS': (0.1, 0.1, 0.2, 0.2),
           'BBOX_NORMALIZE_TARGETS': True,
           'BBOX_NORMALIZE_TARGETS_PRECOMPUTED': False,
           'BBOX_THRESH': 0.5,
           'BG_THRESH_HI': 0.5,
           'BG_THRESH_LO': 0.0,
           'CROWD_FILTER_THRESH': 0.7,
           'DATASETS': (),
           'FG_FRACTION': 0.25,
           'FG_THRESH': 0.5,
           'FREEZE_CONV_BODY': False,
           'GT_MIN_AREA': -1,
           'IMS_PER_BATCH': 1,
           'MAX_SIZE': 1333,
           'PROPOSAL_FILES': (),
           'RPN_BATCH_SIZE_PER_IM': 256,
           'RPN_FG_FRACTION': 0.5,
           'RPN_MIN_SIZE': 0,
           'RPN_NEGATIVE_OVERLAP': 0.3,
           'RPN_NMS_THRESH': 0.7,
           'RPN_POSITIVE_OVERLAP': 0.7,
           'RPN_POST_NMS_TOP_N': 2000,
           'RPN_PRE_NMS_TOP_N': 12000,
           'RPN_STRADDLE_THRESH': 0,
           'SCALES': (800,),
           'SNAPSHOT_ITERS': 20000,
           'USE_FLIPPED': True},
 'VIS': False,
 'VIS_TH': 0.9}
@lin1061991611

I'm having the same issue. Did you solve it?

mingkaid commented Aug 9, 2019

Bumping this up. I encountered the same issue.
