问题:NI(National Instruments)视觉助手 Vision Assistant 导出的 C# 代码编译报错,代码中包含未知类型,编译器提示“缺少 using 指令或程序集引用”。
using Vision_Assistant.Utilities;
namespace Vision_Assistant
{
static class Image_Processing1
{
public static Collection pmResults;
/// <summary>
/// Searches <paramref name="image"/> for occurrences of the pattern template stored at
/// <paramref name="templatePath"/> and logs each match's position, angle and score into
/// <paramref name="ivaData"/> under <paramref name="stepIndex"/>.
/// </summary>
/// <param name="image">Image to search; if calibrated, world coordinates are also logged.</param>
/// <param name="ivaData">Vision Assistant step-result data structure to record results into.</param>
/// <param name="templatePath">Path of the template image file (read via ReadVisionFile).</param>
/// <param name="algorithm">Pattern-matching algorithm to use.</param>
/// <param name="angleRangeMin">Lower bounds of the two rotation angle ranges (length >= 2).</param>
/// <param name="angleRangeMax">Upper bounds of the two rotation angle ranges (length >= 2).</param>
/// <param name="advOptionsItems">Advanced option identifiers (cast to MatchSetupOption).</param>
/// <param name="advOptionsValues">Values paired with <paramref name="advOptionsItems"/>.</param>
/// <param name="numAdvancedOptions">Number of valid entries in the advanced-option arrays.</param>
/// <param name="matchesRequested">Maximum number of matches to return.</param>
/// <param name="score">Minimum acceptance score for a match.</param>
/// <param name="roi">Region of interest to restrict the search to.</param>
/// <param name="stepIndex">Index of the step whose results are (re)written.</param>
/// <returns>The collection of pattern-matching results returned by MatchPattern3.</returns>
private static Collection IVA_MatchPattern(VisionImage image,
                                           IVA_Data ivaData,
                                           string templatePath,
                                           MatchingAlgorithm algorithm,
                                           float[] angleRangeMin,
                                           float[] angleRangeMax,
                                           int[] advOptionsItems,
                                           double[] advOptionsValues,
                                           int numAdvancedOptions,
                                           int matchesRequested,
                                           float score,
                                           Roi roi,
                                           int stepIndex)
{
    using (VisionImage imageTemplate = new VisionImage(ImageType.U8, 7))
    {
        // Read the image template from disk.
        imageTemplate.ReadVisionFile(templatePath);

        // Compute the calibration flag once instead of re-evaluating it per match.
        // A calibrated image additionally gets calibrated (world) positions and angles logged.
        bool isCalibrated = (image.InfoTypes & InfoTypes.Calibration) != 0;

        // Set the angle range (exactly two [min, max] pairs are supplied by the caller).
        Collection angleRange = new Collection();
        for (int i = 0; i < 2; ++i)
        {
            angleRange.Add(new RotationAngleRange(angleRangeMin[i], angleRangeMax[i]));
        }

        // Set the advanced options.
        Collection advancedMatchOptions = new Collection();
        for (int i = 0; i < numAdvancedOptions; ++i)
        {
            advancedMatchOptions.Add(new PMMatchAdvancedSetupDataOption((MatchSetupOption)advOptionsItems[i], advOptionsValues[i]));
        }

        // Searches for areas in the image that match the given pattern.
        // (No separate initialization needed: MatchPattern3 provides the collection.)
        Collection patternMatchingResults = Algorithms.MatchPattern3(image, imageTemplate, algorithm, matchesRequested, score, angleRange, roi, advancedMatchOptions);

        // ////////////////////////////////////////
        // Store the results in the data structure.
        // ////////////////////////////////////////
        // First, delete all the results of this step (from a previous iteration).
        Functions.IVA_DisposeStepResults(ivaData, stepIndex);

        if (patternMatchingResults.Count > 0)
        {
            ivaData.stepResults[stepIndex].results.Add(new IVA_Result("# of objects", patternMatchingResults.Count));
            for (int i = 0; i < patternMatchingResults.Count; ++i)
            {
                ivaData.stepResults[stepIndex].results.Add(new IVA_Result(String.Format("Match {0}.X Position (Pix.)", i + 1), patternMatchingResults[i].Position.X));
                ivaData.stepResults[stepIndex].results.Add(new IVA_Result(String.Format("Match {0}.Y Position (Pix.)", i + 1), patternMatchingResults[i].Position.Y));
                // If the image is calibrated, add the calibrated (world) positions.
                if (isCalibrated)
                {
                    ivaData.stepResults[stepIndex].results.Add(new IVA_Result(String.Format("Match {0}.X Position (World)", i + 1), patternMatchingResults[i].CalibratedPosition.X));
                    ivaData.stepResults[stepIndex].results.Add(new IVA_Result(String.Format("Match {0}.Y Position (World)", i + 1), patternMatchingResults[i].CalibratedPosition.Y));
                }
                ivaData.stepResults[stepIndex].results.Add(new IVA_Result(String.Format("Match {0}.Angle (degrees)", i + 1), patternMatchingResults[i].Rotation));
                if (isCalibrated)
                {
                    ivaData.stepResults[stepIndex].results.Add(new IVA_Result(String.Format("Match {0}.Calibrated Angle (degrees)", i + 1), patternMatchingResults[i].CalibratedRotation));
                }
                ivaData.stepResults[stepIndex].results.Add(new IVA_Result(String.Format("Match {0}.Score", i + 1), patternMatchingResults[i].Score));
            }
        }

        return patternMatchingResults;
    }
}
原因与修复:报错是因为项目缺少两部分依赖。其一,Vision Assistant 导出代码时会同时生成支持文件 Vision_Assistant.Utilities(其中定义了 IVA_Data、IVA_Result 以及 Functions.IVA_DisposeStepResults 等类型),必须把该支持文件一并加入工程,文件顶部的 using Vision_Assistant.Utilities; 才能解析;其二,工程需要添加对 NationalInstruments.Vision 程序集的引用(它提供 VisionImage、Algorithms、Roi 等类型)。两者齐备后“缺少 using 指令或程序集”错误即可消除。