find_aniso_shape_model (Operator)

Name

find_aniso_shape_model — Find the best matches of an anisotropically scaled shape model in an image.

Signature

find_aniso_shape_model(Image : : ModelID, AngleStart, AngleExtent, ScaleRMin, ScaleRMax, ScaleCMin, ScaleCMax, MinScore, NumMatches, MaxOverlap, SubPixel, NumLevels, Greediness : Row, Column, Angle, ScaleR, ScaleC, Score)

Herror T_find_aniso_shape_model(const Hobject Image, const Htuple ModelID, const Htuple AngleStart, const Htuple AngleExtent, const Htuple ScaleRMin, const Htuple ScaleRMax, const Htuple ScaleCMin, const Htuple ScaleCMax, const Htuple MinScore, const Htuple NumMatches, const Htuple MaxOverlap, const Htuple SubPixel, const Htuple NumLevels, const Htuple Greediness, Htuple* Row, Htuple* Column, Htuple* Angle, Htuple* ScaleR, Htuple* ScaleC, Htuple* Score)

void FindAnisoShapeModel(const HObject& Image, const HTuple& ModelID, const HTuple& AngleStart, const HTuple& AngleExtent, const HTuple& ScaleRMin, const HTuple& ScaleRMax, const HTuple& ScaleCMin, const HTuple& ScaleCMax, const HTuple& MinScore, const HTuple& NumMatches, const HTuple& MaxOverlap, const HTuple& SubPixel, const HTuple& NumLevels, const HTuple& Greediness, HTuple* Row, HTuple* Column, HTuple* Angle, HTuple* ScaleR, HTuple* ScaleC, HTuple* Score)

void HShapeModel::FindAnisoShapeModel(const HImage& Image, double AngleStart, double AngleExtent, double ScaleRMin, double ScaleRMax, double ScaleCMin, double ScaleCMax, const HTuple& MinScore, Hlong NumMatches, double MaxOverlap, const HTuple& SubPixel, const HTuple& NumLevels, double Greediness, HTuple* Row, HTuple* Column, HTuple* Angle, HTuple* ScaleR, HTuple* ScaleC, HTuple* Score) const

void HShapeModel::FindAnisoShapeModel(const HImage& Image, double AngleStart, double AngleExtent, double ScaleRMin, double ScaleRMax, double ScaleCMin, double ScaleCMax, double MinScore, Hlong NumMatches, double MaxOverlap, const HString& SubPixel, Hlong NumLevels, double Greediness, HTuple* Row, HTuple* Column, HTuple* Angle, HTuple* ScaleR, HTuple* ScaleC, HTuple* Score) const

void HShapeModel::FindAnisoShapeModel(const HImage& Image, double AngleStart, double AngleExtent, double ScaleRMin, double ScaleRMax, double ScaleCMin, double ScaleCMax, double MinScore, Hlong NumMatches, double MaxOverlap, const char* SubPixel, Hlong NumLevels, double Greediness, HTuple* Row, HTuple* Column, HTuple* Angle, HTuple* ScaleR, HTuple* ScaleC, HTuple* Score) const

void HShapeModel::FindAnisoShapeModel(const HImage& Image, double AngleStart, double AngleExtent, double ScaleRMin, double ScaleRMax, double ScaleCMin, double ScaleCMax, double MinScore, Hlong NumMatches, double MaxOverlap, const wchar_t* SubPixel, Hlong NumLevels, double Greediness, HTuple* Row, HTuple* Column, HTuple* Angle, HTuple* ScaleR, HTuple* ScaleC, HTuple* Score) const   (Windows only)

void HImage::FindAnisoShapeModel(const HShapeModel& ModelID, double AngleStart, double AngleExtent, double ScaleRMin, double ScaleRMax, double ScaleCMin, double ScaleCMax, const HTuple& MinScore, Hlong NumMatches, double MaxOverlap, const HTuple& SubPixel, const HTuple& NumLevels, double Greediness, HTuple* Row, HTuple* Column, HTuple* Angle, HTuple* ScaleR, HTuple* ScaleC, HTuple* Score) const

void HImage::FindAnisoShapeModel(const HShapeModel& ModelID, double AngleStart, double AngleExtent, double ScaleRMin, double ScaleRMax, double ScaleCMin, double ScaleCMax, double MinScore, Hlong NumMatches, double MaxOverlap, const HString& SubPixel, Hlong NumLevels, double Greediness, HTuple* Row, HTuple* Column, HTuple* Angle, HTuple* ScaleR, HTuple* ScaleC, HTuple* Score) const

void HImage::FindAnisoShapeModel(const HShapeModel& ModelID, double AngleStart, double AngleExtent, double ScaleRMin, double ScaleRMax, double ScaleCMin, double ScaleCMax, double MinScore, Hlong NumMatches, double MaxOverlap, const char* SubPixel, Hlong NumLevels, double Greediness, HTuple* Row, HTuple* Column, HTuple* Angle, HTuple* ScaleR, HTuple* ScaleC, HTuple* Score) const

void HImage::FindAnisoShapeModel(const HShapeModel& ModelID, double AngleStart, double AngleExtent, double ScaleRMin, double ScaleRMax, double ScaleCMin, double ScaleCMax, double MinScore, Hlong NumMatches, double MaxOverlap, const wchar_t* SubPixel, Hlong NumLevels, double Greediness, HTuple* Row, HTuple* Column, HTuple* Angle, HTuple* ScaleR, HTuple* ScaleC, HTuple* Score) const   (Windows only)

static void HOperatorSet.FindAnisoShapeModel(HObject image, HTuple modelID, HTuple angleStart, HTuple angleExtent, HTuple scaleRMin, HTuple scaleRMax, HTuple scaleCMin, HTuple scaleCMax, HTuple minScore, HTuple numMatches, HTuple maxOverlap, HTuple subPixel, HTuple numLevels, HTuple greediness, out HTuple row, out HTuple column, out HTuple angle, out HTuple scaleR, out HTuple scaleC, out HTuple score)

void HShapeModel.FindAnisoShapeModel(HImage image, double angleStart, double angleExtent, double scaleRMin, double scaleRMax, double scaleCMin, double scaleCMax, HTuple minScore, int numMatches, double maxOverlap, HTuple subPixel, HTuple numLevels, double greediness, out HTuple row, out HTuple column, out HTuple angle, out HTuple scaleR, out HTuple scaleC, out HTuple score)

void HShapeModel.FindAnisoShapeModel(HImage image, double angleStart, double angleExtent, double scaleRMin, double scaleRMax, double scaleCMin, double scaleCMax, double minScore, int numMatches, double maxOverlap, string subPixel, int numLevels, double greediness, out HTuple row, out HTuple column, out HTuple angle, out HTuple scaleR, out HTuple scaleC, out HTuple score)

void HImage.FindAnisoShapeModel(HShapeModel modelID, double angleStart, double angleExtent, double scaleRMin, double scaleRMax, double scaleCMin, double scaleCMax, HTuple minScore, int numMatches, double maxOverlap, HTuple subPixel, HTuple numLevels, double greediness, out HTuple row, out HTuple column, out HTuple angle, out HTuple scaleR, out HTuple scaleC, out HTuple score)

void HImage.FindAnisoShapeModel(HShapeModel modelID, double angleStart, double angleExtent, double scaleRMin, double scaleRMax, double scaleCMin, double scaleCMax, double minScore, int numMatches, double maxOverlap, string subPixel, int numLevels, double greediness, out HTuple row, out HTuple column, out HTuple angle, out HTuple scaleR, out HTuple scaleC, out HTuple score)

def find_aniso_shape_model(image: HObject, model_id: HHandle, angle_start: float, angle_extent: float, scale_rmin: float, scale_rmax: float, scale_cmin: float, scale_cmax: float, min_score: MaybeSequence[float], num_matches: int, max_overlap: float, sub_pixel: MaybeSequence[str], num_levels: MaybeSequence[int], greediness: float) -> Tuple[Sequence[float], Sequence[float], Sequence[float], Sequence[float], Sequence[float], Sequence[float]]

Description

The operator find_aniso_shape_model finds the best NumMatches instances of the anisotropically scaled shape model ModelID in the input image Image. The model must have been created previously by calling create_aniso_shape_model or read_shape_model.

The position, rotation, and scale in the row and column direction of the found instances of the model are returned in Row, Column, Angle, ScaleR, and ScaleC. Additionally, the score of each found instance is returned in Score.

Input parameters in detail

Image and its domain:

The domain of the image Image determines the search space for the reference point of the model, i.e., for the center of gravity of the domain (region) of the image that was used to create the shape model with create_aniso_shape_model. A different origin set with set_shape_model_origin is not taken into account. The model is searched within those points of the domain of the image in which the model lies completely within the image. This means that the model will not be found if it extends beyond the borders of the image, even if it would achieve a score greater than MinScore (see below). Note that if the model touches the image border on a certain pyramid level, it might not be found even if it lies completely within the original image. As a rule of thumb, the model might not be found if its distance to an image border falls below 2^(NumLevels-1) pixels. This behavior can be changed with set_system('border_shape_models','true') for all models or with set_shape_model_param(ModelID,'border_shape_models','true') for a specific model, which will cause models that extend beyond the image border to be found if they achieve a score greater than MinScore. Here, points lying outside the image are regarded as being occluded, i.e., they lower the score. It should be noted that the runtime of the search increases in this mode. Note further that in rare cases, which typically occur only for artificial images, the model might also not be found if it touches the border of the reduced domain on certain pyramid levels. In this case, it may help to enlarge the reduced domain by using, e.g., dilation_circle.
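
As a sketch of how the search space and the border behavior can be controlled (Region and SearchImage are assumed placeholders; the two parameter calls are the ones quoted in the paragraph above):

* Restrict the search space for the model's reference point to Region.
reduce_domain (SearchImage, Region, SearchImageReduced)
* Allow this model to be found even if it extends beyond the image border.
set_shape_model_param (ModelID, 'border_shape_models', 'true')
* Alternatively, enable this behavior globally for all shape models.
set_system ('border_shape_models', 'true')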

AngleStart, AngleExtent, ScaleRMin, ScaleRMax, ScaleCMin, ScaleCMax:

The parameters AngleStart and AngleExtent determine the range of rotations for which the model is searched. The parameters ScaleRMin, ScaleRMax, ScaleCMin, and ScaleCMax determine the range of scales in the row and column directions for which the model is searched. If necessary, both ranges are clipped to the range given when the model was created with create_aniso_shape_model. In particular, this means that the angle ranges of the model and the search must overlap.

If a model is passed in ModelID that was created with create_shape_model or create_scaled_shape_model, the model is searched with an isotropic scaling of 1.0 or with an isotropic scaling within the range from ScaleRMin to ScaleRMax, respectively. In this case, identical values are returned for ScaleR and ScaleC.

Note that in some cases instances with a rotation or scale that is slightly outside the specified range are found. This may happen if the specified range is smaller than the range given during the creation of the model. AngleStart and AngleExtent as well as ScaleRMin/ScaleCMin and ScaleRMax/ScaleCMax are checked only at the highest pyramid level. Matches that are found on the highest pyramid level are refined to the lowest pyramid level. For performance reasons, however, during the refinement it is no longer checked whether the matches are still within the specified ranges.

MinScore:

The parameter MinScore determines what score a potential match must at least have to be regarded as an instance of the model in the image. The larger MinScore is chosen, the faster the search is. If the model can be expected never to be occluded in the images, MinScore may be set as high as 0.8 or even 0.9. If the matches are not tracked to the lowest pyramid level (see below), it might happen that instances with a score slightly below MinScore are found.

If the shape model has been extended by clutter parameters with set_shape_model_clutter and thus 'use_clutter' is enabled, MinScore expects a second value that determines what clutter value a potential match may at most have to be regarded as an instance of the model in the image. The runtime using clutter parameters will be at least as high as the runtime without clutter parameters and NumMatches set to 0. Changing this second value does not influence the runtime.
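
A minimal sketch of such a clutter-aware search, assuming the model has already been extended with set_shape_model_clutter; the score threshold 0.7, the maximum clutter value 0.2, and the angle and scale ranges are placeholders:

* MinScore carries two values: [minimum score, maximum clutter value].
find_aniso_shape_model (SearchImage, ModelID, rad(-15), rad(30), \
                        0.9, 1.1, 0.9, 1.1, [0.7,0.2], 0, 0.5, \
                        'least_squares', 0, 0.9, Row, Column, Angle, \
                        ScaleR, ScaleC, Score)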

NumMatches:

The maximum number of instances to be found can be determined with NumMatches. If more than NumMatches instances with a score greater than MinScore are found in the image, only the best NumMatches instances are returned. If fewer than NumMatches instances are found, only that number is returned, i.e., the parameter MinScore takes precedence over NumMatches. If all model instances exceeding MinScore in the image should be found, NumMatches must be set to 0.

When tracking the matches through the image pyramid, on each level (except the top level) some less promising matches are rejected based on NumMatches. It is therefore possible that some matches are rejected that would have had a higher score on the lowest pyramid level. For example, the match found with NumMatches set to 1 might differ from the match with the highest score returned when NumMatches is set to 0 or to a value greater than 1.

If multiple objects with a similar score are expected, but only the one with the highest score should be returned, it might be preferable to raise NumMatches and then select the match with the highest score.
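
The following sketch illustrates this strategy (the search parameters are placeholders): all matches are searched with NumMatches set to 0, and the match with the highest score is selected afterwards.

* Find all matches above MinScore ...
find_aniso_shape_model (SearchImage, ModelID, rad(-15), rad(30), \
                        0.9, 1.1, 0.9, 1.1, 0.5, 0, 0.5, 'least_squares', \
                        0, 0.9, Row, Column, Angle, ScaleR, ScaleC, Score)
* ... and select the one with the highest score.
if (|Score| > 0)
    tuple_sort_index (Score, Indices)
    BestIndex := Indices[|Indices| - 1]
    BestRow := Row[BestIndex]
    BestColumn := Column[BestIndex]
    BestAngle := Angle[BestIndex]
endif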

If the shape model has been extended by clutter parameters using set_shape_model_clutter, NumMatches also considers the second value passed in MinScore: If more than NumMatches instances with a score greater than the first entry of MinScore and a clutter score smaller than the second entry of MinScore are found in the image, only the best NumMatches instances with respect to clutter are returned. Still, MinScore takes precedence over NumMatches, and NumMatches must be set to 0 if all model instances fulfilling the conditions imposed by MinScore should be found. Note that when clutter parameters are used, no matches are rejected while tracking the matches through the image pyramid. Thus the runtime using clutter parameters will be at least as high as the runtime without clutter parameters and NumMatches set to 0.

MaxOverlap:

If the model exhibits symmetries, it may happen that multiple instances with similar positions but different rotations are found in the image. The parameter MaxOverlap determines by what fraction (i.e., a number between 0 and 1) two instances may at most overlap in order to consider them as different instances, and hence to be returned separately. If two instances overlap each other by more than MaxOverlap, only the best instance is returned. The calculation of the overlap is based on the smallest enclosing rectangle of arbitrary orientation (see smallest_rectangle2) of the found instances. If MaxOverlap=0, the found instances may not overlap at all, while for MaxOverlap=1 all instances are returned.

SubPixel:

The parameter SubPixel determines whether the instances should be extracted with subpixel accuracy. If SubPixel is set to 'none' (or 'false' for backwards compatibility), the model's pose is determined only with pixel accuracy and with the angle and scale resolution that was specified with create_aniso_shape_model. If SubPixel is set to 'interpolation' (or 'true'), the position as well as the rotation and scale are determined with subpixel accuracy. In this mode, the model's pose is interpolated from the score function. This mode costs almost no computation time and achieves an accuracy that is high enough for most applications. In some applications, however, the accuracy requirements are extremely high. In these cases, the model's pose can be determined through a least-squares adjustment, i.e., by minimizing the distances of the model points to their corresponding image points. In contrast to 'interpolation', this mode requires additional computation time. The different modes for least-squares adjustment ('least_squares', 'least_squares_high', and 'least_squares_very_high') determine the accuracy with which the minimum distance is searched. The higher the chosen accuracy, however, the longer the subpixel extraction takes. Usually, SubPixel should be set to 'interpolation'. If least-squares adjustment is desired, 'least_squares' should be chosen because this results in the best trade-off between runtime and accuracy.

Objects that are slightly deformed with respect to the model in some cases cannot be found or are found only with a low accuracy. For such objects it is possible to additionally pass a maximum allowable object deformation in the parameter SubPixel. The deformation must be specified in pixels. This is done by passing the optional parameter value 'max_deformation ' followed by an integer value between 0 and 32 (in the same string), which specifies the maximum deformation. For example, if the shape of the object may be deformed by up to 2 pixels with respect to the shape that is stored in the model, the value 'max_deformation 2' must be passed in SubPixel in addition to the mode for the subpixel extraction described above, i.e., for example ['least_squares', 'max_deformation 2']. Passing the value 'max_deformation 0' corresponds to a search without allowing deformations, i.e., the behavior is the same as if no 'max_deformation ' value is passed.

Note that higher values for the maximum deformation often result in an increased runtime. Furthermore, the higher the deformation value is chosen, the higher is the risk of finding wrong model instances. Both problems mainly arise when searching for small objects or for objects with fine structures, because for higher deformations such objects lose their characteristic shape, which is important for a robust search. Also note that for higher deformations the accuracy of partially occluded objects might decrease if clutter is present close to the object. Consequently, the maximum deformation should be chosen as small as possible and only as high as necessary.

Approximately rotationally symmetric objects may not be found if 'max_deformation' and AngleExtent are both set to a value greater than 0. In that case, ambiguities may occur that cannot be resolved, and the match is rejected as false. If this happens, try to set either 'max_deformation' or AngleExtent to 0, or adjust the model such that symmetries are reduced.

When specifying a deformation higher than 0, the computation of the score depends on the value chosen for the subpixel extraction. In most cases, the score of a match changes if 'least_squares', 'least_squares_high', or 'least_squares_very_high' (see above) is chosen for the subpixel extraction (in comparison to 'none' or 'interpolation'). Furthermore, if one of the least-squares adjustments is selected, the score might increase when increasing the maximum deformation because more corresponding image points can then be found for the model points. To get a meaningful score value and to avoid erroneous matches, we recommend to always combine the allowance of a deformation with a least-squares adjustment.
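
For illustration, a search that tolerates a local deformation of up to 2 pixels combined with a least-squares refinement, as recommended above (all other parameter values are placeholders):

* Allow up to 2 pixels of deformation and refine the pose with least squares.
find_aniso_shape_model (SearchImage, ModelID, rad(-15), rad(30), \
                        0.9, 1.1, 0.9, 1.1, 0.5, 1, 0.5, \
                        ['least_squares','max_deformation 2'], 0, 0.9, \
                        Row, Column, Angle, ScaleR, ScaleC, Score)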

NumLevels:

The number of pyramid levels used during the search is determined with NumLevels. If necessary, the number of levels is clipped to the range given when the shape model was created with create_aniso_shape_model. If NumLevels is set to 0, the number of pyramid levels specified in create_aniso_shape_model is used.

In certain cases, the number of pyramid levels that was determined automatically with, for example, create_aniso_shape_model may be too high. The consequence may be that some matches that may have a high final score are rejected on the highest pyramid level and thus are not found. Instead of setting MinScore to a very low value to find all matches, it may be better to query the value of NumLevels with get_shape_model_params and then use a slightly lower value in find_aniso_shape_model. This approach is often better regarding the speed and robustness of the matching.
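
A sketch of this approach, assuming that get_shape_model_params returns the number of pyramid levels as its first output value (check the output parameter list of that operator in your HALCON version); the remaining search parameters are placeholders:

* Query the number of pyramid levels stored in the model ...
get_shape_model_params (ModelID, NumLevels, AngleStart, AngleExtent, \
                        AngleStep, ScaleMin, ScaleMax, ScaleStep, Metric, \
                        MinContrast)
* ... and start the matching one level below the automatically chosen one.
find_aniso_shape_model (SearchImage, ModelID, rad(-15), rad(30), \
                        0.9, 1.1, 0.9, 1.1, 0.5, 1, 0.5, 'least_squares', \
                        max([NumLevels - 1, 1]), 0.9, Row, Column, Angle, \
                        ScaleR, ScaleC, Score)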

Optionally, NumLevels can contain a second value that determines the lowest pyramid level to which the found matches are tracked. Hence, a value of [4,2] for NumLevels means that the matching starts at the fourth pyramid level and tracks the matches to the second lowest pyramid level (the lowest pyramid level is denoted by a value of 1). This mechanism can be used to decrease the runtime of the matching. It should be noted, however, that in general the accuracy of the extracted pose parameters is lower in this mode than in the normal mode, in which the matches are tracked to the lowest pyramid level. Hence, if a high accuracy is desired, SubPixel should be set to at least 'least_squares'. If the lowest pyramid level to use is chosen too large, it may happen that the desired accuracy cannot be achieved, or that wrong instances of the model are found because the model is not specific enough on the higher pyramid levels to facilitate a reliable selection of the correct instance of the model. In this case, the lowest pyramid level to use must be set to a smaller value.

In input images of poor quality, i.e., in images that are, e.g., defocused, deformed, or noisy, often no instances of the shape model can be found on the lowest pyramid level. The reason for this behavior is the missing or deformed edge information, which is a result of the poor image quality. Nevertheless, the edge information may be sufficient on higher pyramid levels. But keep in mind the above-mentioned restrictions on accuracy and robustness if instances that were found on higher pyramid levels are used. The suitable pyramid level, i.e., the lowest pyramid level on which at least one instance of the shape model can be found, depends on the model and on the input image and may vary from image to image. To facilitate the matching on images of poor quality, the lowest pyramid level on which at least one instance of the model can be found can be determined automatically during the matching. To activate this mechanism, i.e., to use the so-called 'increased tolerance mode', the lowest pyramid level must be specified negatively in NumLevels. If, e.g., NumLevels is set to [4,-2], the matching starts at the fourth pyramid level and tracks the matches down to the second lowest pyramid level. This means that an instance of the shape model is searched for on pyramid level 2. If no instance of the model can be found on this pyramid level, the lowest pyramid level on which at least one instance of the model can be found is determined automatically. The instances of this pyramid level are then returned.
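
As an example of these two mechanisms (the level values are placeholders and must be compatible with the number of levels of your model):

* Start on pyramid level 4 and track the matches only down to level 2.
find_aniso_shape_model (SearchImage, ModelID, rad(-15), rad(30), \
                        0.9, 1.1, 0.9, 1.1, 0.5, 1, 0.5, 'least_squares', \
                        [4,2], 0.9, Row, Column, Angle, ScaleR, ScaleC, Score)
* Same as above, but with the 'increased tolerance mode' activated by the
* negative lowest pyramid level.
find_aniso_shape_model (SearchImage, ModelID, rad(-15), rad(30), \
                        0.9, 1.1, 0.9, 1.1, 0.5, 1, 0.5, 'least_squares', \
                        [4,-2], 0.9, Row, Column, Angle, ScaleR, ScaleC, Score)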

If the ModelID was adapted with adapt_shape_model_high_noise, the estimated lowest pyramid level is used by default. However, the user can override the estimated lowest pyramid level by providing two values in NumLevels and explicitly setting the lowest pyramid level.

Greediness:

The parameter Greediness determines how “greedily” the search should be carried out. If Greediness=0, a safe search heuristic is used, which always finds the model if it is visible in the image and the other parameters are set appropriately. However, the search will be relatively time consuming in this case. If Greediness=1, an unsafe search heuristic is used, which may cause the model not to be found in rare cases, even though it is visible in the image. For Greediness=1, the maximum search speed is achieved. In almost all cases, the shape model will still be found for Greediness=0.9.
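
A small sketch for comparing the runtime of a safe and a greedy search (all search parameters are placeholders; count_seconds is used for timing):

* Safe but slow search heuristic.
count_seconds (T1)
find_aniso_shape_model (SearchImage, ModelID, rad(-15), rad(30), \
                        0.9, 1.1, 0.9, 1.1, 0.5, 1, 0.5, 'least_squares', \
                        0, 0.0, Row, Column, Angle, ScaleR, ScaleC, Score)
count_seconds (T2)
* Greedy (default) search heuristic.
find_aniso_shape_model (SearchImage, ModelID, rad(-15), rad(30), \
                        0.9, 1.1, 0.9, 1.1, 0.5, 1, 0.5, 'least_squares', \
                        0, 0.9, Row, Column, Angle, ScaleR, ScaleC, Score)
count_seconds (T3)
* (T2 - T1) versus (T3 - T2) shows the speedup of the greedier search.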

Output parameters in detail

Row, Column, Angle, ScaleR, ScaleC:

The position, rotation, and scale in the row and column direction of the found instances of the model are returned in Row, Column, Angle, ScaleR, and ScaleC. The coordinates Row and Column are related to the position of the origin of the shape model in the search image. However, Row and Column do not exactly correspond to this position. Instead, find_aniso_shape_model returns slightly modified values that are optimized for creating a transformation matrix that can be used for alignment or for the visualization of the model contours. (This has to do with the way HALCON transforms iconic objects, see affine_trans_pixel.) The example below shows how to create the transformation matrix for the alignment of the found matches and how to use it to display them in the search image.

By default, the model origin is the center of gravity of the domain (region) of the image that was used to create the shape model with create_aniso_shape_model. A different origin can be set with set_shape_model_origin.
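
As a sketch of this alignment (assuming at least one match was found; the construction of the transformation follows the example at the end of this page), the model contours can be transformed to the pose of the first match and displayed:

* Build the transformation for the first match and overlay the model contours.
get_shape_model_contours (ModelContours, ModelID, 1)
hom_mat2d_identity (HomMat2DIdentity)
hom_mat2d_scale (HomMat2DIdentity, ScaleR[0], ScaleC[0], 0, 0, HomMat2DScale)
hom_mat2d_rotate (HomMat2DScale, Angle[0], 0, 0, HomMat2DRotate)
hom_mat2d_translate (HomMat2DRotate, Row[0], Column[0], HomMat2DObject)
affine_trans_contour_xld (ModelContours, ContoursTrans, HomMat2DObject)
dev_display (ContoursTrans)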

Score:

The score of each found instance is returned in Score. The score is a number between 0 and 1, which is an approximate measure of how much of the model is visible in the image. If, for example, half of the model is occluded, the score cannot exceed 0.5.

If the shape model has been extended by clutter parameters using set_shape_model_clutter, Score additionally returns the clutter scores of each found instance after the score values described above. If, for example, half of the clutter region is filled by clutter edges, the clutter score will equal 0.5. If, e.g., two instances are found with a score of 0.9 for the first instance and 0.8 for the second instance, and a clutter score of 0.2 for the first instance and 0.1 for the second instance, Score = [0.9,0.8,0.2,0.1] is returned. Please note that of all shape-based matching results, clutter scores are affected the most by variations of the illumination.
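
A sketch for separating the two halves of Score when clutter parameters are in use (the number of matches equals the length of Row):

* Score = [score_1, ..., score_n, clutter_1, ..., clutter_n].
NumFound := |Row|
if (NumFound > 0)
    Scores := Score[0:NumFound - 1]
    ClutterScores := Score[NumFound:2 * NumFound - 1]
endif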

Specifying a timeout

Using the operator set_shape_model_param you can specify a 'timeout' for find_aniso_shape_model. If find_aniso_shape_model reaches this 'timeout', it terminates without results and returns the error code 9400 (H_ERR_TIMEOUT). Depending on the scaling ranges specified by ScaleRMin, ScaleRMax, ScaleCMin, and ScaleCMax, find_aniso_shape_model needs a significant amount of time to free cached transformations if the shape model is not pregenerated. As these transformations have to be freed after a timeout occurs, the runtime of find_aniso_shape_model may exceed the specified 'timeout' by this time.
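
A sketch of a search with a timeout; the value 500 and its unit (assumed here to be milliseconds) are placeholders, see set_shape_model_param for details:

* Limit the search time for subsequent calls of find_aniso_shape_model.
set_shape_model_param (ModelID, 'timeout', 500)
try
    find_aniso_shape_model (SearchImage, ModelID, rad(-15), rad(30), \
                            0.9, 1.1, 0.9, 1.1, 0.5, 1, 0.5, 'least_squares', \
                            0, 0.9, Row, Column, Angle, ScaleR, ScaleC, Score)
catch (Exception)
    if (Exception[0] == 9400)
        * H_ERR_TIMEOUT: the search was aborted and no results are available.
        stop ()
    endif
endtry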

Visualization of the results

To display the results found by shape-based matching, we highly recommend the usage of the procedure dev_display_shape_matching_results.

Further Information

For an explanation of the different 2D coordinate systems used in HALCON, see the introduction of chapter Transformations / 2D Transformations.

Execution Information

This operator supports canceling timeouts and interrupts.

Parameters

Image (input_object)  (multichannel-)image → object (byte / uint2)

Input image in which the model should be found.

ModelID (input_control)  shape_model → (handle)

Handle of the model.

AngleStart (input_control)  angle.rad → (real)

Smallest rotation of the model.

Default: -0.39

Suggested values: -3.14, -1.57, -0.79, -0.39, -0.20, 0.0

AngleExtent (input_control)  angle.rad → (real)

Extent of the rotation angles.

Default: 0.79

Suggested values: 6.29, 3.14, 1.57, 0.79, 0.39, 0.0

Restriction: AngleExtent >= 0

ScaleRMin (input_control)  number → (real)

Minimum scale of the model in the row direction.

Default: 0.9

Suggested values: 0.5, 0.6, 0.7, 0.8, 0.9, 1.0

Restriction: ScaleRMin > 0

ScaleRMax (input_control)  number → (real)

Maximum scale of the model in the row direction.

Default: 1.1

Suggested values: 1.0, 1.1, 1.2, 1.3, 1.4, 1.5

Restriction: ScaleRMax >= ScaleRMin

ScaleCMin (input_control)  number → (real)

Minimum scale of the model in the column direction.

Default: 0.9

Suggested values: 0.5, 0.6, 0.7, 0.8, 0.9, 1.0

Restriction: ScaleCMin > 0

ScaleCMax (input_control)  number → (real)

Maximum scale of the model in the column direction.

Default: 1.1

Suggested values: 1.0, 1.1, 1.2, 1.3, 1.4, 1.5

Restriction: ScaleCMax >= ScaleCMin

MinScore (input_control)  real(-array) → (real)

Minimum score of the instances of the model to be found.

Default: 0.5

Suggested values: 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0

Value range: 0 ≤ MinScore ≤ 1

Minimum increment: 0.01

Recommended increment: 0.05

NumMatches (input_control)  integer → (integer)

Number of instances of the model to be found (or 0 for all matches).

Default: 1

Suggested values: 0, 1, 2, 3, 4, 5, 10, 20

MaxOverlap (input_control)  real → (real)

Maximum overlap of the instances of the model to be found.

Default: 0.5

Suggested values: 0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0

Value range: 0 ≤ MaxOverlap ≤ 1

Minimum increment: 0.01

Recommended increment: 0.05

SubPixel (input_control)  string(-array) → (string)

Subpixel accuracy if not equal to 'none'.

Default: 'least_squares'

Suggested values: 'none', 'interpolation', 'least_squares', 'least_squares_high', 'least_squares_very_high', 'max_deformation 1', 'max_deformation 2', 'max_deformation 3', 'max_deformation 4', 'max_deformation 5', 'max_deformation 6'

NumLevels (input_control)  integer(-array) → (integer)

Number of pyramid levels used in the matching (and lowest pyramid level to use if |NumLevels| = 2).

Default: 0

List of values: 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10

Greediness (input_control)  real → (real)

“Greediness” of the search heuristic (0: safe but slow; 1: fast but matches may be missed).

Default: 0.9

Suggested values: 0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0

Value range: 0 ≤ Greediness ≤ 1

Minimum increment: 0.01

Recommended increment: 0.05

Row (output_control)  point.y-array → (real)

Row coordinate of the found instances of the model.

Column (output_control)  point.x-array → (real)

Column coordinate of the found instances of the model.

Angle (output_control)  angle.rad-array → (real)

Rotation angle of the found instances of the model.

ScaleR (output_control)  number-array → (real)

Scale of the found instances of the model in the row direction.

ScaleC (output_control)  number-array → (real)

Scale of the found instances of the model in the column direction.

Score (output_control)  real-array → (real)

Score of the found instances of the model.

Example (HDevelop)

create_aniso_shape_model (ImageReduced, 0, rad(-15), rad(30), 0, \
                          0.9, 1.1, 0, 0.9, 1.1, 0, 'none', \
                          'use_polarity', 30, 10, ModelID)
get_shape_model_contours (ModelXLD, ModelID, 1)
find_aniso_shape_model (SearchImage, ModelID, rad(-15), rad(30), \
                        0.9, 1.1, 0.9, 1.1, 0.5, 1, 0.5, 'interpolation', \
                        0, 0, Row, Column, Angle, ScaleR, ScaleC, Score)
* Create transformation matrix
hom_mat2d_identity (HomMat2DIdentity)
hom_mat2d_scale (HomMat2DIdentity, ScaleR, ScaleC, 0, 0, HomMat2DScale)
hom_mat2d_rotate (HomMat2DScale, Angle, 0, 0, HomMat2DRotate)
hom_mat2d_translate (HomMat2DRotate, Row, Column, HomMat2DObject)
* Calculate true position of the model origin in the search image
affine_trans_pixel (HomMat2DObject, 0, 0, RowObject, ColObject)
* Display results
dev_display_shape_matching_results (ModelID, 'red', Row, Column, Angle, \
                  ScaleR, ScaleC, 0)

Result

If the parameter values are correct, the operator find_aniso_shape_model returns the value 2 (H_MSG_TRUE). If the input is empty (no input images are available), the behavior can be set via set_system('no_object_result',<Result>). If necessary, an exception is raised.

Possible Predecessors

create_aniso_shape_model, read_shape_model, set_shape_model_origin, set_shape_model_clutter

Possible Successors

clear_shape_model

Alternatives

find_generic_shape_model

See also

set_system, get_system

Module

Matching