create_aniso_shape_model (Operator)
Name
create_aniso_shape_model
— Prepare an anisotropically scaled shape model for matching.
Signature
create_aniso_shape_model(Template : : NumLevels, AngleStart, AngleExtent, AngleStep, ScaleRMin, ScaleRMax, ScaleRStep, ScaleCMin, ScaleCMax, ScaleCStep, Optimization, Metric, Contrast, MinContrast : ModelID)
Herror T_create_aniso_shape_model(const Hobject Template, const Htuple NumLevels, const Htuple AngleStart, const Htuple AngleExtent, const Htuple AngleStep, const Htuple ScaleRMin, const Htuple ScaleRMax, const Htuple ScaleRStep, const Htuple ScaleCMin, const Htuple ScaleCMax, const Htuple ScaleCStep, const Htuple Optimization, const Htuple Metric, const Htuple Contrast, const Htuple MinContrast, Htuple* ModelID)
void CreateAnisoShapeModel(const HObject& Template, const HTuple& NumLevels, const HTuple& AngleStart, const HTuple& AngleExtent, const HTuple& AngleStep, const HTuple& ScaleRMin, const HTuple& ScaleRMax, const HTuple& ScaleRStep, const HTuple& ScaleCMin, const HTuple& ScaleCMax, const HTuple& ScaleCStep, const HTuple& Optimization, const HTuple& Metric, const HTuple& Contrast, const HTuple& MinContrast, HTuple* ModelID)
void HShapeModel::HShapeModel(const HImage& Template, const HTuple& NumLevels, double AngleStart, double AngleExtent, const HTuple& AngleStep, double ScaleRMin, double ScaleRMax, const HTuple& ScaleRStep, double ScaleCMin, double ScaleCMax, const HTuple& ScaleCStep, const HTuple& Optimization, const HString& Metric, const HTuple& Contrast, const HTuple& MinContrast)
void HShapeModel::HShapeModel(const HImage& Template, Hlong NumLevels, double AngleStart, double AngleExtent, double AngleStep, double ScaleRMin, double ScaleRMax, double ScaleRStep, double ScaleCMin, double ScaleCMax, double ScaleCStep, const HString& Optimization, const HString& Metric, Hlong Contrast, Hlong MinContrast)
void HShapeModel::HShapeModel(const HImage& Template, Hlong NumLevels, double AngleStart, double AngleExtent, double AngleStep, double ScaleRMin, double ScaleRMax, double ScaleRStep, double ScaleCMin, double ScaleCMax, double ScaleCStep, const char* Optimization, const char* Metric, Hlong Contrast, Hlong MinContrast)
void HShapeModel::HShapeModel(const HImage& Template, Hlong NumLevels, double AngleStart, double AngleExtent, double AngleStep, double ScaleRMin, double ScaleRMax, double ScaleRStep, double ScaleCMin, double ScaleCMax, double ScaleCStep, const wchar_t* Optimization, const wchar_t* Metric, Hlong Contrast, Hlong MinContrast)
(Windows only)
void HShapeModel::CreateAnisoShapeModel(const HImage& Template, const HTuple& NumLevels, double AngleStart, double AngleExtent, const HTuple& AngleStep, double ScaleRMin, double ScaleRMax, const HTuple& ScaleRStep, double ScaleCMin, double ScaleCMax, const HTuple& ScaleCStep, const HTuple& Optimization, const HString& Metric, const HTuple& Contrast, const HTuple& MinContrast)
void HShapeModel::CreateAnisoShapeModel(const HImage& Template, Hlong NumLevels, double AngleStart, double AngleExtent, double AngleStep, double ScaleRMin, double ScaleRMax, double ScaleRStep, double ScaleCMin, double ScaleCMax, double ScaleCStep, const HString& Optimization, const HString& Metric, Hlong Contrast, Hlong MinContrast)
void HShapeModel::CreateAnisoShapeModel(const HImage& Template, Hlong NumLevels, double AngleStart, double AngleExtent, double AngleStep, double ScaleRMin, double ScaleRMax, double ScaleRStep, double ScaleCMin, double ScaleCMax, double ScaleCStep, const char* Optimization, const char* Metric, Hlong Contrast, Hlong MinContrast)
void HShapeModel::CreateAnisoShapeModel(const HImage& Template, Hlong NumLevels, double AngleStart, double AngleExtent, double AngleStep, double ScaleRMin, double ScaleRMax, double ScaleRStep, double ScaleCMin, double ScaleCMax, double ScaleCStep, const wchar_t* Optimization, const wchar_t* Metric, Hlong Contrast, Hlong MinContrast)
(Windows only)
HShapeModel HImage::CreateAnisoShapeModel(const HTuple& NumLevels, double AngleStart, double AngleExtent, const HTuple& AngleStep, double ScaleRMin, double ScaleRMax, const HTuple& ScaleRStep, double ScaleCMin, double ScaleCMax, const HTuple& ScaleCStep, const HTuple& Optimization, const HString& Metric, const HTuple& Contrast, const HTuple& MinContrast) const
HShapeModel HImage::CreateAnisoShapeModel(Hlong NumLevels, double AngleStart, double AngleExtent, double AngleStep, double ScaleRMin, double ScaleRMax, double ScaleRStep, double ScaleCMin, double ScaleCMax, double ScaleCStep, const HString& Optimization, const HString& Metric, Hlong Contrast, Hlong MinContrast) const
HShapeModel HImage::CreateAnisoShapeModel(Hlong NumLevels, double AngleStart, double AngleExtent, double AngleStep, double ScaleRMin, double ScaleRMax, double ScaleRStep, double ScaleCMin, double ScaleCMax, double ScaleCStep, const char* Optimization, const char* Metric, Hlong Contrast, Hlong MinContrast) const
HShapeModel HImage::CreateAnisoShapeModel(Hlong NumLevels, double AngleStart, double AngleExtent, double AngleStep, double ScaleRMin, double ScaleRMax, double ScaleRStep, double ScaleCMin, double ScaleCMax, double ScaleCStep, const wchar_t* Optimization, const wchar_t* Metric, Hlong Contrast, Hlong MinContrast) const
(Windows only)
static void HOperatorSet.CreateAnisoShapeModel(HObject template, HTuple numLevels, HTuple angleStart, HTuple angleExtent, HTuple angleStep, HTuple scaleRMin, HTuple scaleRMax, HTuple scaleRStep, HTuple scaleCMin, HTuple scaleCMax, HTuple scaleCStep, HTuple optimization, HTuple metric, HTuple contrast, HTuple minContrast, out HTuple modelID)
public HShapeModel(HImage template, HTuple numLevels, double angleStart, double angleExtent, HTuple angleStep, double scaleRMin, double scaleRMax, HTuple scaleRStep, double scaleCMin, double scaleCMax, HTuple scaleCStep, HTuple optimization, string metric, HTuple contrast, HTuple minContrast)
public HShapeModel(HImage template, int numLevels, double angleStart, double angleExtent, double angleStep, double scaleRMin, double scaleRMax, double scaleRStep, double scaleCMin, double scaleCMax, double scaleCStep, string optimization, string metric, int contrast, int minContrast)
void HShapeModel.CreateAnisoShapeModel(HImage template, HTuple numLevels, double angleStart, double angleExtent, HTuple angleStep, double scaleRMin, double scaleRMax, HTuple scaleRStep, double scaleCMin, double scaleCMax, HTuple scaleCStep, HTuple optimization, string metric, HTuple contrast, HTuple minContrast)
void HShapeModel.CreateAnisoShapeModel(HImage template, int numLevels, double angleStart, double angleExtent, double angleStep, double scaleRMin, double scaleRMax, double scaleRStep, double scaleCMin, double scaleCMax, double scaleCStep, string optimization, string metric, int contrast, int minContrast)
HShapeModel HImage.CreateAnisoShapeModel(HTuple numLevels, double angleStart, double angleExtent, HTuple angleStep, double scaleRMin, double scaleRMax, HTuple scaleRStep, double scaleCMin, double scaleCMax, HTuple scaleCStep, HTuple optimization, string metric, HTuple contrast, HTuple minContrast)
HShapeModel HImage.CreateAnisoShapeModel(int numLevels, double angleStart, double angleExtent, double angleStep, double scaleRMin, double scaleRMax, double scaleRStep, double scaleCMin, double scaleCMax, double scaleCStep, string optimization, string metric, int contrast, int minContrast)
def create_aniso_shape_model(template: HObject, num_levels: Union[int, str], angle_start: float, angle_extent: float, angle_step: Union[float, str], scale_rmin: float, scale_rmax: float, scale_rstep: Union[float, str], scale_cmin: float, scale_cmax: float, scale_cstep: Union[float, str], optimization: MaybeSequence[str], metric: str, contrast: MaybeSequence[Union[int, str]], min_contrast: Union[int, str]) -> HHandle
Description
The operator create_aniso_shape_model prepares a template, which is passed in the image Template, as an anisotropically scaled shape model used for matching. The ROI of the model is passed as the domain of Template. The output parameter ModelID is a handle for this model, which is used in subsequent calls to find_aniso_shape_model.
The center of gravity of the domain (region) of the model image Template is used as the origin (reference point) of the model. A different origin can be set with set_shape_model_origin.
The model is generated using multiple image pyramid levels and is stored in memory. If a complete pregeneration of the model is selected (see below), the model is generated at multiple rotations and anisotropic scales (i.e., independent scales in the row and column direction) on each level. The model can be extended by clutter parameters with set_shape_model_clutter.
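The typical workflow around this operator looks as follows. This is a minimal sketch assuming the MVTec HALCON/Python bindings (import name halcon); the file names and ROI coordinates are placeholders, and the parameter order of find_aniso_shape_model follows its HDevelop signature and should be verified against the reference of your HALCON version.

import halcon as ha  # assumed MVTec HALCON/Python bindings

# Read the model image and restrict its domain to the template ROI.
image = ha.read_image('model_image')          # placeholder file name
roi = ha.gen_rectangle1(100, 100, 300, 400)   # illustrative ROI
template = ha.reduce_domain(image, roi)

# Create the anisotropically scaled shape model; parameter order as in the
# Python signature above, most parameters left at 'auto'.
model_id = ha.create_aniso_shape_model(
    template, 'auto', -0.39, 0.79, 'auto',
    0.9, 1.1, 'auto', 0.9, 1.1, 'auto',
    'auto', 'use_polarity', 'auto', 'auto')

# Search for the model instance in another image.
search = ha.read_image('search_image')        # placeholder file name
row, col, angle, scale_r, scale_c, score = ha.find_aniso_shape_model(
    search, model_id, -0.39, 0.79, 0.9, 1.1, 0.9, 1.1,
    0.7, 1, 0.5, 'least_squares', 0, 0.9)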
Input parameters in detail
NumLevels:
- The number of pyramid levels is determined with the parameter NumLevels. It should be chosen as large as possible, because this significantly reduces the time needed to find the object. On the other hand, NumLevels must be chosen such that the model is still recognizable and contains a sufficient number of points (at least four) on the highest pyramid level. This can be checked using the output of inspect_shape_model. If not enough model points are generated, the number of pyramid levels is reduced internally until enough model points are found on the highest pyramid level. If this procedure would lead to a model with no pyramid levels, i.e., if the number of model points is already too small on the lowest pyramid level, create_aniso_shape_model returns an error message.
If NumLevels is set to 'auto' (or 0 for backwards compatibility), create_aniso_shape_model determines the number of pyramid levels automatically. The automatically computed number of pyramid levels can be queried using get_shape_model_params. In rare cases, create_aniso_shape_model may determine a number of pyramid levels that is too large or too small. If the number of pyramid levels is chosen too large, the model may not be recognized in the image, or very low values for MinScore or Greediness may have to be selected in find_aniso_shape_model in order to find the model. If the number of pyramid levels is chosen too small, the time required to find the model in find_aniso_shape_model may increase. In these cases, the number of pyramid levels should be selected using the output of inspect_shape_model, as in the sketch below.
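A possible way to check the number of pyramid levels, sketched with the assumed Python bindings ('template' and 'model_id' refer to the first sketch above; the exact return structure of get_shape_model_params and the Python call forms of the other operators are assumptions to verify):

import halcon as ha  # assumed MVTec HALCON/Python bindings

# Show the model regions on, e.g., 5 pyramid levels for a contrast of 30
# and report the area of each region (an empty region on the highest level
# indicates that NumLevels is too large).
model_images, model_regions = ha.inspect_shape_model(template, 5, 30)
for level in range(1, ha.count_obj(model_regions) + 1):
    area, row, col = ha.area_center(ha.select_obj(model_regions, level))
    print('level', level, 'region area:', area)

# Query the parameters actually stored in the model, including the
# automatically determined number of pyramid levels.
print(ha.get_shape_model_params(model_id))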
AngleStart, AngleExtent, and AngleStep:
- The parameters AngleStart and AngleExtent determine the range of possible rotations in which the model can occur in the image. Note that the model can only be found in this range of angles by find_aniso_shape_model. The parameter AngleStep determines the step length within the selected range of angles. Hence, if subpixel accuracy is not specified in find_aniso_shape_model, this parameter specifies the accuracy that is achievable for the angles in find_aniso_shape_model. AngleStep should be chosen based on the size of the object. Smaller models do not have many different discrete rotations in the image, and hence AngleStep should be chosen larger for smaller models. If AngleExtent is not an integer multiple of AngleStep, AngleStep is modified accordingly.
To ensure that angle values of exactly 0.0 are returned by find_aniso_shape_model for model instances without rotation, the range of possible rotations is modified as follows: if there is no positive integer value n such that AngleStart plus n times AngleStep is exactly 0.0, AngleStart is decreased by up to AngleStep and AngleExtent is increased by AngleStep (see the sketch below).
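The adjustment of the angle range can be illustrated with a few lines of plain Python. This is only a reconstruction of the documented rule for illustration; it is not HALCON's internal code, and the rounding details are assumptions:

import math

def adjust_angle_range(angle_start, angle_extent, angle_step):
    # Assumes the range [angle_start, angle_start + angle_extent] contains 0.0.
    # Make angle_extent an integer multiple of angle_step by adapting the step.
    n_steps = max(1, round(angle_extent / angle_step))
    angle_step = angle_extent / n_steps
    # If 0.0 does not lie on the grid angle_start + n * angle_step, decrease
    # angle_start by up to angle_step and increase angle_extent by angle_step.
    offset = (-angle_start) % angle_step
    if offset > 1e-12 and abs(offset - angle_step) > 1e-12:
        angle_start -= angle_step - offset
        angle_extent += angle_step
    return angle_start, angle_extent, angle_step

# Default angle range with a step of about one degree.
print(adjust_angle_range(-0.39, 0.79, math.radians(1.0)))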
ScaleRMin, ScaleRMax, ScaleCMin, ScaleCMax, ScaleRStep, and ScaleCStep:
- The parameters ScaleRMin, ScaleRMax, ScaleCMin, and ScaleCMax determine the range of possible anisotropic scales of the model in the row and column direction. A scale of 1 in both scale factors corresponds to the original size of the model. The parameters ScaleRStep and ScaleCStep determine the step length within the selected range of scales. Hence, if subpixel accuracy is not specified in find_aniso_shape_model, these parameters specify the accuracy that is achievable for the scales in find_aniso_shape_model. Like AngleStep, ScaleRStep and ScaleCStep should be chosen based on the size of the object. If the respective range of scales is not an integer multiple of ScaleRStep and ScaleCStep, ScaleRStep and ScaleCStep are modified accordingly.
To ensure that scale values of exactly 1.0 are returned by find_aniso_shape_model for model instances that are not scaled, the range of possible scales is modified as follows: if there are no positive integer values n and m such that ScaleRMin plus n times ScaleRStep is exactly 1.0 and ScaleCMin plus m times ScaleCStep is exactly 1.0, ScaleRMin and ScaleCMin are decreased by up to ScaleRStep and ScaleCStep, respectively, and ScaleRMax and ScaleCMax are increased such that the range of possible scales grows by ScaleRStep and ScaleCStep, respectively.
Note that the transformations are treated internally such that the scalings are applied first, followed by the rotation. Therefore, the model should usually be aligned such that it appears horizontally or vertically in the model image.
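For example, a model that tolerates more stretching in the row direction than in the column direction could be created as follows (assumed Python bindings; the scale ranges and the explicit step length of 0.05 are illustrative, and 'template' is the reduced-domain model image from the first sketch). Without subpixel refinement in find_aniso_shape_model, 0.05 is then also the achievable scale resolution:

import halcon as ha  # assumed MVTec HALCON/Python bindings

model_id = ha.create_aniso_shape_model(
    template, 'auto', -0.39, 0.79, 'auto',
    0.8, 1.2, 0.05,    # ScaleRMin, ScaleRMax, ScaleRStep (row direction)
    0.95, 1.05, 0.05,  # ScaleCMin, ScaleCMax, ScaleCStep (column direction)
    'auto', 'use_polarity', 'auto', 'auto')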
Optimization:
- For particularly large models, it may be useful to reduce the number of model points by setting Optimization to a value different from 'none'. If Optimization = 'none', all model points are stored. In all other cases, the number of points is reduced according to the value of Optimization. If the number of points is reduced, it may be necessary in find_aniso_shape_model to set the parameter Greediness to a smaller value, e.g., 0.7 or 0.8. For small models, the reduction of the number of model points does not result in a speed-up of the search because in this case usually significantly more potential instances of the model must be examined.
If Optimization is set to 'auto', create_aniso_shape_model automatically determines the reduction of the number of model points. The effect of the point reduction can be checked as sketched below.
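One way to see the effect of the point reduction is to compare the number of model contour points with and without it. This sketch uses the assumed Python bindings; get_shape_model_contours, count_obj, select_obj, and get_contour_xld are existing operators, but their exact Python call forms and return values should be verified ('template' again refers to the first sketch):

import halcon as ha  # assumed MVTec HALCON/Python bindings

def num_model_points(optimization):
    model = ha.create_aniso_shape_model(
        template, 'auto', -0.39, 0.79, 'auto',
        0.9, 1.1, 'auto', 0.9, 1.1, 'auto',
        optimization, 'use_polarity', 'auto', 'auto')
    contours = ha.get_shape_model_contours(model, 1)  # lowest pyramid level
    total = 0
    for i in range(1, ha.count_obj(contours) + 1):
        rows, cols = ha.get_contour_xld(ha.select_obj(contours, i))
        total += len(rows)
    return total

print('none                :', num_model_points('none'))
print('point_reduction_high:', num_model_points('point_reduction_high'))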
Metric:
- The parameter Metric determines the conditions under which the model is recognized in the image.
If Metric = 'use_polarity', the object in the image and the model must have the same contrast. If, for example, the model is a bright object on a dark background, the object is found only if it is also brighter than the background.
If Metric = 'ignore_global_polarity', the object is also found if the contrast reverses globally. In the above example, the object is hence also found if it is darker than the background. The runtime of find_aniso_shape_model will increase slightly in this case.
If Metric = 'ignore_local_polarity', the model is found even if the contrast changes locally. This mode can, for example, be useful if the object consists of a part with medium gray value, within which either darker or brighter sub-objects lie. Since in this case the runtime of find_aniso_shape_model increases significantly, it is usually better to create several models that reflect the possible contrast variations of the object with create_aniso_shape_model, and to match them simultaneously with find_aniso_shape_models.
The above three metrics can only be applied to single-channel images. If a multichannel image is used as the model image or as the search image, only the first channel will be used (and no error message will be returned).
If Metric = 'ignore_color_polarity', the model is found even if the color contrast changes locally. This is, for example, the case if parts of the object can change their color, e.g., from red to green. In particular, this mode is useful if it is not known in advance in which channels the object is visible. In this mode, the runtime of find_aniso_shape_model can also increase significantly. The metric 'ignore_color_polarity' can be used for images with an arbitrary number of channels. If it is used for single-channel images, it has the same effect as 'ignore_local_polarity'. It should be noted that for Metric = 'ignore_color_polarity' the number of channels in the model creation with create_aniso_shape_model and in the search with find_aniso_shape_model can be different. This can, for example, be used to create a model from a synthetically generated single-channel image. Furthermore, it should be noted that the channels do not need to contain a spectral subdivision of the light (as in an RGB image). The channels can, for example, also contain images of the same object that were obtained by illuminating the object from different directions.
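A small sketch (assumed Python bindings; count_channels is an existing operator, but the decision rule itself is only an illustration, and 'template' refers to the reduced-domain model image from the first sketch) that selects a metric depending on whether the model image is multichannel:

import halcon as ha  # assumed MVTec HALCON/Python bindings

channels = ha.count_channels(template)
metric = 'ignore_color_polarity' if channels > 1 else 'use_polarity'
model_id = ha.create_aniso_shape_model(
    template, 'auto', -0.39, 0.79, 'auto',
    0.9, 1.1, 'auto', 0.9, 1.1, 'auto',
    'auto', metric, 'auto', 'auto')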
Contrast:
- The parameter Contrast determines the contrast the model points must have. The contrast is a measure for local gray value differences between the object and the background and between different parts of the object. Contrast should be chosen such that only the significant features of the template are used for the model. Contrast can also contain a tuple with two values. In this case, the model is segmented using a method similar to the hysteresis threshold method used in edges_image. Here, the first element of the tuple determines the lower threshold, while the second element determines the upper threshold. For more information about the hysteresis threshold method, see hysteresis_threshold. Optionally, Contrast can contain a third value as the last element of the tuple. This value determines a threshold for the selection of significant model components based on the size of the components, i.e., components that have fewer points than the minimum size thus specified are suppressed. Because the minimum size is applied to the extent of the components, the derived model contours can still be smaller than the specified minimum size. This threshold for the minimum size is divided by two for each successive pyramid level. If small model components should be suppressed, but hysteresis thresholding should not be performed, three values must nevertheless be specified in Contrast. In this case, the first two values can simply be set to identical values. The effect of this parameter can be checked in advance with inspect_shape_model (see the sketch below).
If Contrast is set to 'auto', create_aniso_shape_model determines the three values described above automatically. Alternatively, only the contrast ('auto_contrast'), the hysteresis thresholds ('auto_contrast_hyst'), or the minimum size ('auto_min_size') can be determined automatically. The remaining values that are not determined automatically can additionally be passed in the form of a tuple. Various combinations are also allowed: if, for example, ['auto_contrast','auto_min_size'] is passed, both the contrast and the minimum size are determined automatically. If ['auto_min_size',20,30] is passed, the minimum size is determined automatically while the hysteresis thresholds are set to 20 and 30, etc. In certain cases, the automatic determination of the contrast thresholds may not be satisfactory. For example, a manual setting of these parameters should be preferred if certain model components should be included or suppressed for application-specific reasons, or if the object contains several different contrasts. In such cases, the contrast thresholds should be determined automatically with determine_shape_model_params and subsequently verified using inspect_shape_model before calling create_aniso_shape_model. Note that MinContrast influences the automatic contrast estimation, and hence also the estimation of the minimum size.
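The effect of an explicit contrast specification can be previewed before model creation, for example as follows (assumed Python bindings; the values 20, 30, and 10 are illustrative, 'template' is the reduced-domain model image from the first sketch, and the tuple forms accepted by inspect_shape_model should be verified against its reference):

import halcon as ha  # assumed MVTec HALCON/Python bindings

# Lower hysteresis threshold 20, upper threshold 30, minimum component
# size of 10 points.
contrast = [20, 30, 10]
model_images, model_regions = ha.inspect_shape_model(template, 4, contrast)

# If the displayed model regions look reasonable, reuse the same tuple (or
# a mixed form such as ['auto_min_size', 20, 30]) for the actual model.
model_id = ha.create_aniso_shape_model(
    template, 'auto', -0.39, 0.79, 'auto',
    0.9, 1.1, 'auto', 0.9, 1.1, 'auto',
    'auto', 'use_polarity', contrast, 'auto')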
MinContrast:
- With MinContrast, it can be determined which contrast the model must at least have in the recognition performed by find_aniso_shape_model. In other words, this parameter separates the model from the noise in the image. Therefore, a good choice is the range of gray value changes caused by the noise in the image. If, for example, the gray values fluctuate within a range of 10 gray levels, MinContrast should be set to 10. If multichannel images are used for the model and the search images, and if the parameter Metric is set to 'ignore_color_polarity' (see above), the noise in one channel must be multiplied by the square root of the number of channels to determine MinContrast. If, for example, the gray values fluctuate within a range of 10 gray levels in a single channel and the image is a three-channel image, MinContrast should be set to 17 (see the calculation below). Obviously, MinContrast must be smaller than Contrast. If the model should be recognized in very low contrast images, MinContrast must be set to a correspondingly small value. If the model should be recognized even if it is severely occluded, MinContrast should be slightly larger than the range of gray value fluctuations created by noise in order to ensure that the position and rotation of the model are extracted robustly and accurately by find_aniso_shape_model.
If MinContrast is set to 'auto', the minimum contrast is determined automatically based on the noise in the model image. Consequently, an automatic determination only makes sense if the image noise during the recognition is similar to the noise in the model image. Furthermore, in some cases it is advisable to increase the automatically determined value in order to increase the robustness against occlusions (see above). The automatically computed minimum contrast can be queried using get_shape_model_params.
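The multichannel rule above amounts to a one-line calculation; the values 10 and 3 are the example values from the paragraph:

import math

noise_per_channel = 10   # gray value fluctuation in a single channel
channels = 3             # e.g., an RGB search image
min_contrast = round(noise_per_channel * math.sqrt(channels))
print(min_contrast)      # 17, the value quoted in the text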
Complete pregeneration of the model
Optionally, a second value can be passed in Optimization. This value determines whether the model is pregenerated completely or not. To do so, the second value of Optimization must be set to either 'pregeneration' or 'no_pregeneration'. If the second value is not used (i.e., if only one value is passed), the mode that is set with set_system('pregenerate_shape_models',...) is used. With the default value ('pregenerate_shape_models' = 'false'), the model is not pregenerated completely. The complete pregeneration of the model normally leads to slightly lower runtimes because the model does not need to be transformed at runtime. However, in this case, the memory requirements and the time required to create the model are significantly higher. It should also be noted that the two modes cannot be expected to return exactly identical results, because transforming the model at runtime necessarily leads to different internal data for the transformed models than pregenerating the transformed models. For example, if the model is not pregenerated completely, find_aniso_shape_model typically returns slightly lower scores, which may require setting a slightly lower value for MinScore than for a completely pregenerated model. Furthermore, the poses obtained by interpolation may differ slightly in the two modes. If maximum accuracy is desired, the pose of the model should be determined by least-squares adjustment.
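A sketch of how the pregeneration mode can be controlled (assumed Python bindings; 'template' is the reduced-domain model image from the first sketch):

import halcon as ha  # assumed MVTec HALCON/Python bindings

# Global default used when Optimization contains only one value;
# 'false' (no complete pregeneration) is the documented default.
ha.set_system('pregenerate_shape_models', 'false')

# Request complete pregeneration explicitly for this model via the second
# value of Optimization, regardless of the global setting.
model_id = ha.create_aniso_shape_model(
    template, 'auto', -0.39, 0.79, 'auto',
    0.9, 1.1, 'auto', 0.9, 1.1, 'auto',
    ['auto', 'pregeneration'], 'use_polarity', 'auto', 'auto')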
If a complete pregeneration of the model is selected, the model is pregenerated for the selected angle and scale range and stored in memory. The memory required to store the model is proportional to the number of angle steps, the number of scale steps, and the number of points in the model. Hence, if AngleStep, ScaleRStep, or ScaleCStep are too small, or if AngleExtent or the range of scales is too big, the model may no longer fit into the (virtual) memory. In this case, AngleStep, ScaleRStep, or ScaleCStep must be enlarged, or AngleExtent or the range of scales must be reduced. In any case, it is desirable that the model completely fits into the main memory, because this avoids paging by the operating system, and hence the time to find the object will be much smaller. Since angles can be determined with subpixel resolution by find_aniso_shape_model, AngleStep >= 1° and ScaleRStep, ScaleCStep >= 0.02 can be selected for models of a diameter smaller than about 200 pixels.
If AngleStep = 'auto' or ScaleRStep, ScaleCStep = 'auto' (or 0 for backwards compatibility in both cases) is selected, create_aniso_shape_model automatically determines a suitable angle or scale step length, respectively, based on the size of the model. The automatically computed angle and scale step lengths can be queried using get_shape_model_params.
If a complete pregeneration of the model is not selected, the model is only created in a reference pose on each pyramid level. In this case, the model must be transformed to the different angles and scales at runtime in find_aniso_shape_model. Because of this, the recognition of the model might require slightly more time.
Note that pregenerated shape models are tailored to a specific image size. For runtime reasons, searching images of different sizes in parallel with the same model is not supported. In this case, copies of the same model must be used; otherwise, the program may crash!
Execution Information
- Multithreading type: reentrant (runs in parallel with non-exclusive operators).
- Multithreading scope: global (may be called from any thread).
- Processed without parallelization.
This operator returns a handle. Note that the state of an instance of this handle type may be changed by specific operators even though the handle is used as an input parameter by those operators.
Parameters
Template (input_object) (multichannel-)image → object (byte / uint2)
Input image whose domain will be used to create the model.
NumLevels (input_control) integer → HTuple (integer / string)
Maximum number of pyramid levels.
Default value: 'auto'
List of values: 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 'auto'
AngleStart (input_control) angle.rad → HTuple (real)
Smallest rotation of the pattern.
Default value: -0.39
Suggested values: -3.14, -1.57, -0.79, -0.39, -0.20, 0.0
AngleExtent (input_control) angle.rad → HTuple (real)
Extent of the rotation angles.
Default value: 0.79
Suggested values: 6.29, 3.14, 1.57, 0.79, 0.39
Restriction: AngleExtent >= 0
AngleStep (input_control) angle.rad → HTuple (real / string)
Step length of the angles (resolution).
Default value: 'auto'
Suggested values: 'auto', 0.0175, 0.0349, 0.0524, 0.0698, 0.0873
Restriction: AngleStep >= 0 && AngleStep <= pi / 16
ScaleRMin (input_control) number → HTuple (real)
Minimum scale of the pattern in the row direction.
Default value: 0.9
Suggested values: 0.5, 0.6, 0.7, 0.8, 0.9, 1.0
Restriction: ScaleRMin > 0
ScaleRMax (input_control) number → HTuple (real)
Maximum scale of the pattern in the row direction.
Default value: 1.1
Suggested values: 1.0, 1.1, 1.2, 1.3, 1.4, 1.5
Restriction: ScaleRMax >= ScaleRMin
ScaleRStep (input_control) number → HTuple (real / string)
Scale step length (resolution) in the row direction.
Default value: 'auto'
Suggested values: 'auto', 0.01, 0.02, 0.05, 0.1, 0.15, 0.2
Restriction: ScaleRStep >= 0
ScaleCMin (input_control) number → HTuple (real)
Minimum scale of the pattern in the column direction.
Default value: 0.9
Suggested values: 0.5, 0.6, 0.7, 0.8, 0.9, 1.0
Restriction: ScaleCMin > 0
ScaleCMax (input_control) number → HTuple (real)
Maximum scale of the pattern in the column direction.
Default value: 1.1
Suggested values: 1.0, 1.1, 1.2, 1.3, 1.4, 1.5
Restriction: ScaleCMax >= ScaleCMin
ScaleCStep (input_control) number → HTuple (real / string)
Scale step length (resolution) in the column direction.
Default value: 'auto'
Suggested values: 'auto', 0.01, 0.02, 0.05, 0.1, 0.15, 0.2
Restriction: ScaleCStep >= 0
Optimization (input_control) string(-array) → HTuple (string)
Kind of optimization and optionally method used for generating the model.
Default value: 'auto'
List of values: 'auto', 'no_pregeneration', 'none', 'point_reduction_high', 'point_reduction_low', 'point_reduction_medium', 'pregeneration'
Metric (input_control) string → HTuple (string)
Match metric.
Default value: 'use_polarity'
List of values: 'ignore_color_polarity', 'ignore_global_polarity', 'ignore_local_polarity', 'use_polarity'
Contrast (input_control) number(-array) → HTuple (integer / string)
Threshold or hysteresis thresholds for the contrast of the object in the template image and optionally minimum size of the object parts.
Default value: 'auto'
Suggested values: 'auto', 'auto_contrast', 'auto_contrast_hyst', 'auto_min_size', 10, 20, 30, 40, 60, 80, 100, 120, 140, 160
MinContrast (input_control) number → HTuple (integer / string)
Minimum contrast of the objects in the search images.
Default value: 'auto'
Suggested values: 'auto', 1, 2, 3, 5, 7, 10, 20, 30, 40
Restriction: MinContrast < Contrast
ModelID (output_control) shape_model → HTuple (handle)
Handle of the model.
Result
If the parameters are valid, the operator create_aniso_shape_model returns the value TRUE. If necessary, an exception is raised. If the parameters NumLevels and Contrast are chosen such that the model contains too few points, the error 8510 is raised.
Possible Predecessors
draw_region, reduce_domain, threshold
Possible Successors
find_aniso_shape_model, find_aniso_shape_models, get_shape_model_params, clear_shape_model, write_shape_model, set_shape_model_origin, set_shape_model_clutter
Alternatives
create_generic_shape_model
See also
set_system, get_system
Module
Matching