#include "StdAfx.h"
|
#include "CameraControlAlign.h"
|
#include "CHImageControls/CHImageProcess.h"
|
#include "MainFrm.h"
|
#include "CHReviewSetting/SystemInfo.h"
|
#include "CHReviewRecipe/RsRcpAlignInfo.h"
|
|
#define ALIGN_TEMPLATE_PATH _T("D:\\DIT_Review\\Recipe\\AlignImage\\")
|
//#define ALIGN_TEMPLATE_PATH _T("C:\\DIT_Review\\Recipe\\AlignImage\\") //210805
|
|
using namespace CHImageControls;
|
|
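// CCameraControlAlign drives the align cameras: it connects them, runs the
// auto-light control loop, template-matches the align marks, and reports the
// result to its parent through the m_pACC2P callback interface (IACC2P_*).
// Rough caller flow (a sketch only; the actual call sites live in the owning UI/frame code):
//   ConnectCamera(vecInfo);                            // allocate per-camera helpers
//   UpdateProcessStatus(..., ProcessLoading, pGlass);  // copy recipe, load template images
//   Camera_Control(CameraControlStart);                // start grabbing
//   ... AL2P_ProcessEnd()/IALC2P_ProcessEnd() -> FindAlignMark() -> IACC2P_AlignResult()
//   DisconnectCamera();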
CCameraControlAlign::CCameraControlAlign(void)
|
{
|
m_nLineType = SystemLine_CPJT;
|
m_nMachineType = SystemMachine_CPJT_QD; // default machine type; the other CPJT types (RGB, BANK, BDI, OC, CS) are not set here
|
|
m_pACC2P = NULL;
|
m_pVectorAlignLightControl = NULL;
|
m_nViewMode = 0;
|
m_strSaveImageBasePath = _T("");
|
|
m_nAlignType = FALSE;
|
|
InitializeCriticalSection(&m_csAlignFindProcess);
|
}
|
|
CCameraControlAlign::~CCameraControlAlign(void)
|
{
|
DisconnectCamera();
|
|
DeleteCriticalSection(&m_csAlignFindProcess);
|
}
|
|
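// ConnectCamera: after the base-class connect succeeds, one CAutoLightControl,
// one SLightData and one CAlignFinder are created per camera, and the frame rate
// from the passed camera info is applied when GetFrameRateApply() returns 1.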
int CCameraControlAlign::ConnectCamera(const VectorCameraControlInfo& vecCameraControlInfo)
|
{
|
if (!CCameraController::ConnectCamera(vecCameraControlInfo))
|
{
|
return FALSE;
|
}
|
|
int nConnectCameraCount = 0;
|
|
for (VectorCameraControlIt it=m_vecCameraControl.begin(); it!=m_vecCameraControl.end(); it++)
|
{
|
CAutoLightControl *pControl = new CAutoLightControl((*it)->GetIndex());
|
pControl->SetALC2P(static_cast<IAutoLightControl2Paraent*>(this));
|
m_vecAutoLightControl.push_back(pControl);
|
|
SLightData* pData = new SLightData();
|
m_vecLightData.push_back(pData);
|
|
// alloc finder
|
CAlignFinder *pFinder = NULL;
|
// if (m_nLineType== SystemLine_FIC8 && m_nMachineType==SystemMachine_ITO && m_nAlignType)
|
// {
|
// int nIndex = (*it)->GetCameraIndex();
|
// pFinder = new CAlignFinder_Corner(nIndex);
|
// g_pLog->DisplayMessage(_T("[ConnectCamera] EdgeAlign Finder New Created!!"));
|
// }
|
// else
|
// {
|
pFinder = new CAlignFinder((*it)->GetIndex());
|
g_pLog->DisplayMessage(_T("[ConnectCamera] AlignMark Finder New Created : Index %d!!"), (*it)->GetIndex());
|
// }
|
|
// push back
|
m_vecAlignFinder.push_back(pFinder);
|
|
if(int(vecCameraControlInfo.size()) > nConnectCameraCount)
|
{
|
if (GetFrameRateApply()==1)
{
|
|
if(SetFrameRate(nConnectCameraCount, vecCameraControlInfo[nConnectCameraCount].GetFrameCount()))
|
{
|
g_pLog->DisplayMessage(_T("[ConnectCamera] Align Camera Frame Rate Change Success!"));
|
}
|
else
|
{
|
g_pLog->DisplayMessage(_T("[ConnectCamera] Align Camera Frame Rate Change Fail!"));
|
}
|
|
}
|
}
|
nConnectCameraCount++;
|
}
|
|
return TRUE;
|
}
|
|
void CCameraControlAlign::SetAlignType(BOOL nAlignType)
|
{
|
m_nAlignType = nAlignType;
|
}
|
|
//#3357 KYH Align image reset ADD START
void CCameraControlAlign::SetAlignClearImage() //210330 kyh reset the align images
|
{
|
//m_vecCameraImage[0]->GetImageData(View_Camera)->ClearImage(0);
|
//m_vecCameraImage[1]->GetImageData(View_Camera)->ClearImage(0);
|
for (int i = 0; i < (int)m_vecCameraImage.size(); i++)
|
{
|
/*m_vecCameraImage[i]->GetImageData(View_Camera).
|
m_vecCameraImage[i]->GetImageData(View_Result)->ClearImage(0);
|
m_vecCameraImage[i]->GetImageData(View_Matching)->ClearImage(0);*/
|
m_vecCameraImage[i]->ClearImage(View_Result);
|
m_vecCameraImage[i]->ClearImage(View_Camera);
|
m_vecCameraImage[i]->ClearImage(View_Matching);
|
}
|
}
|
//#3357 KYH Align image reset ADD END
|
|
void CCameraControlAlign::DisconnectCamera()
|
{
|
CCameraController::DisconnectCamera();
|
|
for (VectorAutoLightControlIt it=m_vecAutoLightControl.begin(); it!=m_vecAutoLightControl.end(); it++)
|
{
|
CAutoLightControl *pControl = static_cast<CAutoLightControl*>(*it);
//delete pControl;	// note: the light control objects are not freed here; only the pointers are dropped when the vector is cleared below
pControl = NULL;
|
}
|
m_vecAutoLightControl.clear();
|
|
for (VectorAlignFinderIt it=m_vecAlignFinder.begin(); it!=m_vecAlignFinder.end(); it++)
|
{
|
CAlignFinder *pFinder = static_cast<CAlignFinder*>(*it);
|
delete pFinder;
|
pFinder = NULL;
|
}
|
m_vecAlignFinder.clear();
|
|
for (VectorLightDataIt it=m_vecLightData.begin(); it!=m_vecLightData.end(); it++)
|
{
|
SLightData *pData = static_cast<SLightData*>(*it);
|
delete pData;
|
pData = NULL;
|
}
|
m_vecLightData.clear();
|
}
|
|
int CCameraControlAlign::Camera_Control(int nControlType)
|
{
|
int nRetValue = 0;
|
switch(nControlType)
|
{
|
case CameraControlStop:
|
Camera_Stop();
|
m_nControlType = nControlType;
|
nRetValue = 1;
|
break;
|
|
case CameraControlLive:
|
nRetValue = Camera_Live();
|
break;
|
|
case CameraControlStart:
|
nRetValue = Camera_Start(1);
|
break;
|
}
|
|
if (m_pACC2P)
|
{
|
m_pACC2P->IACC2P_CameraControl(m_nControlType);
|
}
|
|
return nRetValue;
|
}
|
|
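// AlignProcess: builds an SAutoLightParam from the align recipe (target range
// and exposure limits) and hands it to each camera's auto-light controller.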
int CCameraControlAlign::AlignProcess()
|
{
|
int nViewIdx;
|
for(VectorCameraControlIt it = m_vecCameraControl.begin(); it != m_vecCameraControl.end(); it++)
|
{
|
SAutoLightParam sControlParam;
|
sControlParam.bAutoProcess = m_AlignRecipe.bAutoProcess;
|
sControlParam.dTargetMin = m_AlignRecipe.dTargetMin;
|
sControlParam.dTargetMax = m_AlignRecipe.dTargetMax;
|
sControlParam.dLightLevelMin = m_AlignRecipe.dExposureMin;
|
sControlParam.dLightLevelMax = m_AlignRecipe.dExposureMax;
|
|
nViewIdx = (*it)->GetIndex();
|
if ((int)m_vecAutoLightControl.size() <= nViewIdx) continue; //KMJ Added: skip cameras without a matching auto-light controller
|
m_vecAutoLightControl[nViewIdx]->ProcessAutoLight(sControlParam);
|
}
|
|
return Align_Success;
|
}
|
|
// camera control 2 parent
|
|
BOOL CCameraControlAlign::ICC2P_GetCurrentFrame(int nCameraIndex, int nFrameWidth, int nFrameHeight, int nFrameChannels, CCHImageData* pImageData)
|
{
|
if (m_pACC2P==NULL) return FALSE;
|
|
return m_pACC2P->IACC2P_GetCurrentFrame(nCameraIndex, nFrameWidth, nFrameHeight, nFrameChannels, pImageData);
|
}
|
|
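// ICC2P_FrameCaptured: for each captured frame, the light data (exposure time,
// mean grey level, current light level) is updated under the per-camera
// light-data critical section, the frame is copied into the View_Camera buffer,
// and the image for the current view mode is forwarded to the parent.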
BOOL CCameraControlAlign::ICC2P_FrameCaptured(int nCameraIndex, int nFrameIndex, int nFrameCount)
|
{
|
if (nCameraIndex<0 || nCameraIndex>=(int)m_vecCameraControl.size())
|
{
|
g_pLog->DisplayMessage(_T("camera index fail"));
|
return FALSE;
|
}
|
|
EnterCriticalSection(&m_vecLightData[nCameraIndex]->csProcess);
|
|
BYTE *pCameraBuffer = (BYTE*)m_vecCameraControl[nCameraIndex]->GetImageAddress(nFrameIndex);
|
if (pCameraBuffer==NULL)
|
{
|
g_pLog->DisplayMessage(_T("frame index fail"));
|
LeaveCriticalSection(&m_vecLightData[nCameraIndex]->csProcess);
|
return FALSE;
|
}
|
|
double dTime = 0.0;
|
m_vecCameraControl[nCameraIndex]->GetExposureTime(dTime);
|
m_vecLightData[nCameraIndex]->dExposureTime = dTime;
|
m_vecLightData[nCameraIndex]->dLightValue = CAutoLightControl::GetAverageValue((BYTE*)pCameraBuffer,
|
m_vecCameraControl[nCameraIndex]->GetWidth(),
|
m_vecCameraControl[nCameraIndex]->GetHeight(),
|
m_vecCameraControl[nCameraIndex]->GetWidthStep());
|
|
if (m_pACC2P)
|
{
|
int nLightLevel = 0;
|
if (m_pACC2P->IACC2P_GetLightLevel(nCameraIndex, nLightLevel))
|
{
|
m_vecLightData[nCameraIndex]->nControlLightLevel = nLightLevel;
|
}
|
}
|
|
LeaveCriticalSection(&m_vecLightData[nCameraIndex]->csProcess);
|
|
m_vecCameraImageIndex[nCameraIndex] = View_Camera;
|
BYTE *pImageBuffer = (BYTE*)((m_vecCameraImage[nCameraIndex])->GetImageData(View_Camera))->GetImageBuffer();
|
if (pImageBuffer==NULL)
|
{
|
g_pLog->DisplayMessage(_T("buffer index fail"));
|
return FALSE;
|
}
|
|
memcpy(pImageBuffer, pCameraBuffer, sizeof(BYTE)*m_vecCameraImageSize[nCameraIndex]);
|
// m_vecCameraControl[nCameraIndex]->ReleaseImageAddress(pCameraBuffer);
|
|
if (m_pACC2P==NULL)
|
{
|
g_pLog->DisplayMessage(_T("accp is null!"));
|
return FALSE;
|
}
|
|
// if (m_nProcessStatus==PCControl_ReviewStart)
|
// {
|
// g_pLog->DisplayMessage(_T("AlignProcess() 진입 %d %d"), nCameraIndex, nFrameIndex);
|
// m_nProcessStatus = PCControl_ReviewEnd;
|
// AlignProcess();
|
// g_pLog->DisplayMessage(_T("AlignProcess() 종료 %d %d"), nCameraIndex, nFrameIndex);
|
// }
|
m_pACC2P->IACC2P_FrameCaptured(nCameraIndex, m_nViewMode, (m_vecCameraImage[nCameraIndex])->GetImageData(m_nViewMode));
|
return TRUE;
|
}
|
|
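// LoadTemplateImage: loads the main and assistant template images (#3671) for
// both align cameras from ALIGN_IMAGE_PATH and stores their grey bands in
// View_Template / View_AssistantTemplate.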
BOOL CCameraControlAlign::LoadTemplateImage()
|
{
|
BOOL bAllLoad = TRUE;
|
|
CString strFilename = _T("");
|
CCHImageData tempImage;
|
/*< LYW 20211012 - #3671 ADD Start >*/
|
CCHImageData assistanttempImage;
|
/*< LYW 20211012 - #3671 ADD End >*/
|
for (int i=0; i<2; i++)
|
{
|
if(m_vecCameraImage.size() <= i) continue;
|
(m_vecCameraImage[i]->GetImageData(View_Template))->ReleaseImage();
|
/*< LYW 20211012 - #3671 ADD Start >*/
|
(m_vecCameraImage[i]->GetImageData(View_AssistantTemplate))->ReleaseImage();
|
/*< LYW 20211012 - #3671 ADD End >*/
|
|
// load
|
CString strFilename = ALIGN_IMAGE_PATH;
|
/*< LYW 20211012 - #3671 ADD Start >*/
|
CString strAssistantFilename = ALIGN_IMAGE_PATH;
|
/*< LYW 20211012 - #3671 ADD End >*/
|
|
if (i==0)
|
{
|
strFilename += _T("\\") + m_AlignRecipe.strFirstImageFile;
|
/*< LYW 20211012 - #3671 ADD Start >*/
|
strAssistantFilename += _T("\\") + m_AlignRecipe.strFirstAssistantImageFile;
|
/*< LYW 20211012 - #3671 ADD End >*/
|
|
}
|
else if (i==1)
|
{
|
strFilename += _T("\\") + m_AlignRecipe.strSecondImageFile;
|
/*< LYW 20211012 - #3671 ADD Start >*/
|
strAssistantFilename += _T("\\") + m_AlignRecipe.strSecondAssistantImageFile;
|
/*< LYW 20211012 - #3671 ADD End >*/
|
}
|
|
//strFilename += m_AlignRecipe.strTemplateName[i];
|
|
if (tempImage.LoadImage(strFilename)==FALSE)
|
{
|
bAllLoad = bAllLoad && FALSE;
|
continue;
|
}
|
|
/*< LYW 20211012 - #3671 ADD Start >*/
|
if (assistanttempImage.LoadImage(strAssistantFilename) == FALSE)
|
{
|
bAllLoad = bAllLoad && FALSE;
|
continue;
|
}
|
/*< LYW 20211012 - #3671 ADD End >*/
|
|
bAllLoad = bAllLoad && tempImage.GetBandImage(BandTypeGray, m_vecCameraImage[i]->GetImageData(View_Template));
|
/*< LYW 20211012 - #3671 ADD Start >*/
|
bAllLoad = bAllLoad && assistanttempImage.GetBandImage(BandTypeGray, m_vecCameraImage[i]->GetImageData(View_AssistantTemplate));
|
/*< LYW 20211012 - #3671 ADD End >*/
|
}
|
|
return bAllLoad;
|
}
|
|
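// ChangeExposureTime: with manual processing disabled the cameras are switched
// to auto exposure; otherwise the recipe exposure times are written to the two
// align cameras.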
BOOL CCameraControlAlign::ChangeExposureTime()
|
{
if ((int)m_vecCameraControl.size() < 2) return FALSE;

if (!m_AlignRecipe.bManualProcess)
|
{
|
BOOL bAllLoad = TRUE;
|
bAllLoad = m_vecCameraControl[0]->SetExposureAuto(TRUE);
bAllLoad = bAllLoad && m_vecCameraControl[1]->SetExposureAuto(TRUE);

g_pLog->DisplayMessage(_T("CCameraControlAlign::AlignCam1 Exposure is AUTO!"));
g_pLog->DisplayMessage(_T("CCameraControlAlign::AlignCam2 Exposure is AUTO!"));

return bAllLoad;
|
}
|
|
BOOL bAllLoad = TRUE;
|
bAllLoad = m_vecCameraControl[0]->SetExposureTime(m_AlignRecipe.dFirstCamExposure);
bAllLoad = bAllLoad && m_vecCameraControl[1]->SetExposureTime(m_AlignRecipe.dSecondCamExposure);
|
|
g_pLog->DisplayMessage(_T("CCameraControlAlign::AlignCam1 Exposure is %.1lfms!"), m_AlignRecipe.dFirstCamExposure);
|
g_pLog->DisplayMessage(_T("CCameraControlAlign::AlignCam2 Exposure is %.1lfms!"), m_AlignRecipe.dSecondCamExposure);
|
|
return bAllLoad;
|
}
|
|
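// ChangeLightLevel: pushes the recipe light levels either through the parent
// callback (IACC2P_SetLightLevel) or, when no parent is attached, directly to
// the two entries of m_pVectorAlignLightControl. Note that the recipe exposure
// fields (dFirstCamExposure/dSecondCamExposure) double as light levels here.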
BOOL CCameraControlAlign::ChangeLightLevel()
|
{
|
if (m_pACC2P)
|
{
|
m_pACC2P->IACC2P_SetLightLevel(0, (int)m_AlignRecipe.dFirstCamExposure);
|
m_pACC2P->IACC2P_SetLightLevel(1, (int)m_AlignRecipe.dSecondCamExposure);
|
g_pLog->DisplayMessage(_T("CCameraControlAlign::AlignCam1 Light Level is %d"), (int)m_AlignRecipe.dFirstCamExposure);
|
g_pLog->DisplayMessage(_T("CCameraControlAlign::AlignCam2 Light Level is %d"), (int)m_AlignRecipe.dSecondCamExposure);
|
return TRUE;
|
}
|
|
// 1029
|
if(m_pVectorAlignLightControl)
|
{
|
if((int)m_pVectorAlignLightControl->size() == 2)
|
{
|
(*m_pVectorAlignLightControl)[0]->SetLightLevel((int)m_AlignRecipe.dFirstCamExposure);
|
(*m_pVectorAlignLightControl)[1]->SetLightLevel((int)m_AlignRecipe.dSecondCamExposure);
|
|
g_pLog->DisplayMessage(_T("CCameraControlAlign::AlignCam1 Light Level is %d"), (int)m_AlignRecipe.dFirstCamExposure);
|
g_pLog->DisplayMessage(_T("CCameraControlAlign::AlignCam2 Light Level is %d"), (int)m_AlignRecipe.dSecondCamExposure);
|
|
return TRUE;
|
}
|
}
|
|
return FALSE;
|
}
|
|
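// UpdateProcessStatus: on ProcessLoading the align recipe is taken from the
// glass result, the previous result is cleared and the template images are
// (re)loaded.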
void CCameraControlAlign::UpdateProcessStatus(int nLineType, int nMachineType, int nProcessStatus, const CGlassResult* pGlassResult)
|
{
|
if (pGlassResult==NULL) return;
|
|
m_nProcessStatus = nProcessStatus;
|
|
switch(m_nProcessStatus)
|
{
|
case ProcessLoading:
|
m_AlignRecipe = pGlassResult->m_AlignRecipe;
|
m_AlignResult.Reset();
|
if (LoadTemplateImage())
|
{
|
g_pLog->DisplayMessage(_T("[CameraControlAlign] Load Template is Success!"));
|
}
|
break;
|
}
|
|
}
|
|
BOOL CCameraControlAlign::SetTemplateImage(int nCameraIndex, CCHImageData* pImageData)
|
{
|
if (pImageData==NULL || nCameraIndex<0 || nCameraIndex>=(int)m_vecCameraImage.size())
|
{
|
return FALSE;
|
}
|
|
return pImageData->CopyImageTo(m_vecCameraImage[nCameraIndex]->GetImageData(View_Template));
|
}
|
|
BOOL CCameraControlAlign::GetTemplateImage(int nCameraIndex, CCHImageData* pImageData)
|
{
|
if (pImageData==NULL || nCameraIndex<0 || nCameraIndex>=(int)m_vecCameraImage.size())
|
{
|
return FALSE;
|
}
|
|
return pImageData->CopyImageFrom(m_vecCameraImage[nCameraIndex]->GetImageData(View_Template));
|
}
|
|
BOOL CCameraControlAlign::GetResultImage(int nCameraIndex, CCHImageData* pImageData)
|
{
|
if (pImageData==NULL || nCameraIndex<0 || nCameraIndex>=(int)m_vecCameraImage.size())
|
{
|
return FALSE;
|
}
|
|
return pImageData->CopyImageFrom(m_vecCameraImage[nCameraIndex]->GetImageData(View_Result));
|
}
|
|
|
BOOL CCameraControlAlign::SaveResultImage(int nCameraIndex, const CString& strFilename)
|
{
|
if (nCameraIndex<0 || nCameraIndex>=(int)m_vecCameraImage.size())
|
{
|
return FALSE;
|
}
|
|
return (m_vecCameraImage[nCameraIndex]->GetImageData(View_Result))->SaveImage(strFilename);
|
}
|
|
void CCameraControlAlign::SetSaveImageBasePath(CString strPath)
|
{
|
m_strSaveImageBasePath = strPath;
|
}
|
|
CCameraControl* CCameraControlAlign::GetCameraControl(int nCameraIndex)
|
{
|
for (VectorCameraControlIt it=m_vecCameraControl.begin(); it!=m_vecCameraControl.end(); it++)
|
{
|
if ((*it)->GetIndex()==nCameraIndex)
|
{
|
return (*it);
|
}
|
}
|
return NULL;
|
}
|
|
BOOL CCameraControlAlign::AL2P_ChangeExposure(int nCameraIndex, double dExposureTime)
|
{
|
CCameraControl *pCamera = GetCameraControl(nCameraIndex);
|
if (pCamera==NULL) return FALSE;
|
|
return pCamera->SetExposureTime(dExposureTime);
|
}
|
|
BOOL CCameraControlAlign::AL2P_ChangeLightLevel(int nCameraIndex, int nLightLevel)
|
{
|
if (m_pACC2P)
|
{
|
return m_pACC2P->IACC2P_SetLightLevel(nCameraIndex, nLightLevel);
|
}
|
|
// 1029
|
if(m_pVectorAlignLightControl)
|
{
|
if(nCameraIndex >= 0 && nCameraIndex < (int)m_pVectorAlignLightControl->size())
|
{
|
(*m_pVectorAlignLightControl)[nCameraIndex]->SetLightLevel(nLightLevel);
|
return TRUE;
|
}
|
}
|
|
return FALSE;
|
}
|
|
BOOL CCameraControlAlign::AL2P_ChangeGain(int nCameraIndex, double dGainValue)
|
{
|
return FALSE;
|
}
|
|
BOOL CCameraControlAlign::AL2P_GetCurrentLightExposure(int nCameraIndex, double& dLight, double& dExposure)
|
{
|
|
EnterCriticalSection(&m_vecLightData[nCameraIndex]->csProcess);
|
|
dLight = m_vecLightData[nCameraIndex]->dLightValue;
|
dExposure = m_vecLightData[nCameraIndex]->dExposureTime;
|
|
LeaveCriticalSection(&m_vecLightData[nCameraIndex]->csProcess);
|
|
return TRUE;
|
}
|
|
BOOL CCameraControlAlign::AL2P_GetCurrentLightLevel(int nCameraIndex, double& dLight, int& nLightLevel)
|
{
|
|
EnterCriticalSection(&m_vecLightData[nCameraIndex]->csProcess);
|
|
dLight = m_vecLightData[nCameraIndex]->dLightValue;
|
nLightLevel = m_vecLightData[nCameraIndex]->nControlLightLevel;
|
|
LeaveCriticalSection(&m_vecLightData[nCameraIndex]->csProcess);
|
|
return TRUE;
|
}
|
|
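// AL2P_ProcessEnd (exposure-time variant): stores the auto-light result, runs
// FindAlignMark for this camera and, once m_AlignResult.AlignFindEnd() reports
// completion, sends the combined align result to the parent and resets it.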
void CCameraControlAlign::AL2P_ProcessEnd(int nCameraIndex, int nResultCode, double dLightValue, double dExposureTime)
|
{
|
BOOL bRet = FALSE;
|
// auto light result
|
m_AlignResult.nAutoLightResult[nCameraIndex] = nResultCode;
|
m_AlignResult.dExposureTime[nCameraIndex] = dExposureTime;
|
|
// find align mark
|
bRet = FindAlignMark(nCameraIndex);
|
|
// send find result
|
EnterCriticalSection(&m_csAlignFindProcess);
|
|
if (m_pACC2P && m_AlignResult.AlignFindEnd())
|
{
|
// evaluate the align find result
|
m_AlignResult.nResultCode = m_AlignResult.GetResultCode();
|
m_AlignResult.SetFindResult(m_AlignRecipe);
|
|
// send the result to the parent
|
m_pACC2P->IACC2P_AlignResult(m_AlignResult);
|
|
m_AlignResult.Reset();
|
}
|
|
LeaveCriticalSection(&m_csAlignFindProcess);
|
}
|
|
void CCameraControlAlign::AL2P_ProcessEnd(int nCameraIndex, int nResultCode, double dLightValue, int nLightLevel)
|
{
|
BOOL bRet = FALSE;
|
// auto light result
|
m_AlignResult.nAutoLightResult[nCameraIndex] = nResultCode;
|
m_AlignResult.nControlLightLevel[nCameraIndex] = nLightLevel;
|
|
// find align mark
|
bRet = FindAlignMark(nCameraIndex);
|
|
// send find result
|
EnterCriticalSection(&m_csAlignFindProcess);
|
|
if (m_pACC2P && m_AlignResult.AlignFindEnd())
|
{
|
// evaluate the align find result
|
m_AlignResult.nResultCode = m_AlignResult.GetResultCode();
|
m_AlignResult.SetFindResult(m_AlignRecipe);
|
|
// send the result to the parent
|
m_pACC2P->IACC2P_AlignResult(m_AlignResult);
|
|
m_AlignResult.Reset();
|
}
|
|
LeaveCriticalSection(&m_csAlignFindProcess);
|
}
|
|
void CCameraControlAlign::AL2P_CurrentResult(int nCameraIndex, int nCount, int nResultCode, double dLightValue, double dExposureTime)
|
{
|
g_pLog->DisplayMessage(_T("AL Current [%d] Align Camera, Iter: %d, Code: %d, Light: %.3lf, Exposure: %.3lf"), nCameraIndex, nCount, nResultCode, dLightValue, dExposureTime);
|
}
|
|
void CCameraControlAlign::AL2P_CurrentResult(int nCameraIndex, int nCount, int nResultCode, double dLightValue, int nLightLevel)
|
{
|
g_pLog->DisplayMessage(_T("AL Current [%d] Align Camera, Iter: %d, Code: %d, Light: %.3lf, Control Light Level: %d"), nCameraIndex, nCount, nResultCode, dLightValue, nLightLevel);
|
}
|
|
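// FindAlignMark: saves the current camera image, then template-matches it
// against the main template; on failure it retries with the assistant template
// (#3671). If both scores are low but the two matches land within
// dMatchingPixelStandard pixels of each other, the position is still accepted
// and an accumulation counter is incremented (and reset once it exceeds
// dMatchingAlarmCondition). Threshold and result images are saved afterwards.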
BOOL CCameraControlAlign::FindAlignMark(int nCameraIdx)
|
{
|
if (nCameraIdx < 0 || nCameraIdx >= (int)m_vecCameraImage.size()) return FALSE;
|
|
CCameraImageData* pCameraImage = m_vecCameraImage[nCameraIdx];
|
CAlignFinder* pAlignFinder = m_vecAlignFinder[nCameraIdx];
|
|
/*< LYW 20211014 - #index MOD Start >*/
|
if (pCameraImage == NULL || pAlignFinder == NULL || m_pACC2P == NULL) return FALSE;
|
/*< LYW 20211014 - #index MOD End >*/
|
/*< Origin Code >*/
|
//if (pCameraImage == NULL || pAlignFinder == NULL) return FALSE;
|
|
// set find param;
|
SAlignFindParam findParam;
|
findParam.bMatchProcess = m_AlignRecipe.bUseImage;
|
findParam.dMatchRate = m_AlignRecipe.dMatchingRate;
|
|
findParam.bEdgeProcess = m_AlignRecipe.bUseEdge;
|
findParam.nAlignWidth = m_AlignRecipe.nEdgeWidth;
|
findParam.nAlignHeight = m_AlignRecipe.nEdgeHeight;
|
findParam.nEdgeThreshold = m_AlignRecipe.nEdgeThreshold;
|
findParam.nMergeRange = m_AlignRecipe.nMergeRange;
|
findParam.dEdgeRate = m_AlignRecipe.dEdgeRate;
|
findParam.dMatchingPixelStandard = _tcstod(m_AlignRecipe.strMatchingPixelStandard,NULL);
|
findParam.dMatchingAlarmCondition = _tcstod(m_AlignRecipe.strMatchingAlarmCondition, NULL);
|
// findParam.m_nAlignMarkPos = m_AlignRecipe.nAlignMarkPosition;
|
|
|
CString strPath = _T("");
|
CTime time = CTime::GetCurrentTime();
|
strPath.Format(_T("%s\\Cam%02d_AlignORG_%04d%02d%02d%02d%02d%02d.BMP"), m_strSaveImageBasePath, nCameraIdx,
|
time.GetYear(), time.GetMonth(), time.GetDay(), time.GetHour(), time.GetMinute(), time.GetSecond());
|
(pCameraImage->GetImageData(View_Camera))->SaveImage(strPath);
|
|
CString uploadAlignimage = _T("");
|
uploadAlignimage.Format(_T("Cam%02d_AlignORG_%04d%02d%02d%02d%02d%02d.BMP"), nCameraIdx,
|
time.GetYear(), time.GetMonth(), time.GetDay(), time.GetHour(), time.GetMinute(), time.GetSecond());
|
|
// upload the align image file name to shared memory chm 190623
|
/*if(nCameraIdx == 0){
|
size_t CharactersConverted;
|
wcstombs_s(&CharactersConverted,CDitGlassRawClient::GetInstance()->GetGlassData()->m_strAlignMarkFisrt, uploadAlignimage, _TRUNCATE);
|
}
|
else{
|
size_t CharactersConverted;
|
wcstombs_s(&CharactersConverted,CDitGlassRawClient::GetInstance()->GetGlassData()->m_strAlignMarkSecond, uploadAlignimage, _TRUNCATE);
|
}*/
|
// find process
|
CCHImageData camImage;
|
|
//#3671 TEST CODE Start
|
// CCHImageData TestImage;
|
//
|
// (m_vecCameraImage[nCameraIdx]->GetImageData(View_Camera))->ReleaseImage();
|
// CString strFilename = ALIGN_IMAGE_PATH;
|
// if (nCameraIdx == 0)
|
// {
|
// strFilename += _T("\\Test1.BMP");
|
// }
|
// else
|
// {
|
// strFilename += _T("\\Test2.BMP");
|
// }
|
// TestImage.LoadImage(strFilename);
|
// TestImage.GetBandImage(BandTypeGray, m_vecCameraImage[nCameraIdx]->GetImageData(View_Camera));
|
// (m_vecCameraImage[nCameraIdx]->GetImageData(View_Camera))->GetBandImage(BandTypeGray, &camImage);
|
//#3671 TEST CODE End
|
// find process: the Basler camera delivers 3-channel images, so the sequence runs on a 1-channel copy ksm
|
if ((pCameraImage->GetImageData(View_Camera))->GetChannels() > 1)
|
(pCameraImage->GetImageData(View_Camera))->GetBandImage(BandTypeGray, &camImage);
|
else
|
(pCameraImage->GetImageData(View_Camera))->CopyImageTo(&camImage);
|
|
|
//SAlignFindResult findResult = pAlignFinder->FindAlign(pCameraImage->GetImageData(View_Matching), findParam, pCameraImage->GetImageData(View_Template));
|
SAlignFindResult findResult = pAlignFinder->FindAlign(&camImage, findParam, pCameraImage->GetImageData(View_Template));
|
g_pLog->DisplayMessage(_T("Main TempleteMatching Result : %d Cam ResultCode = %d, PixelX = %d, PixelY = %d, MatchingRate = %.3lf"), nCameraIdx, findResult.nResultCode, int(findResult.dPosX + 0.5), int(findResult.dPosY + 0.5), findResult.dMatchValue);
|
|
// result process
|
if (findResult.nResultCode==AlignMatch_Success)
|
{
|
g_pLog->DisplayMessage(_T("Main TempleteMatching Success!"));
|
m_AlignResult.nFindAlign[nCameraIdx] = 1;
|
m_AlignResult.dFindPixelX[nCameraIdx] = int(findResult.dPosX + 0.5);
|
m_AlignResult.dFindPixelY[nCameraIdx] = int(findResult.dPosY + 0.5);
|
m_AlignResult.dFindScore[nCameraIdx] = findResult.dMatchValue;
|
}
|
/*< LYW 20211013 - #3671 ADD Start >*/
|
|
else
|
{
|
SAlignFindResult findAssistantResult = pAlignFinder->FindAlign(&camImage, findParam, pCameraImage->GetImageData(View_AssistantTemplate));
|
g_pLog->DisplayMessage(_T("Assistant TempleteMatching Result : %d Cam ResultCode = %d, PixelX = %d, PixelY = %d, MatchingRate = %.3lf"), nCameraIdx, findAssistantResult.nResultCode, int(findAssistantResult.dPosX + 0.5), int(findAssistantResult.dPosY + 0.5), findAssistantResult.dMatchValue);
|
if (findAssistantResult.nResultCode == AlignMatch_Success)
|
{
|
g_pLog->DisplayMessage(_T("Assistant TempleteMatching Success!"));
|
m_AlignResult.nFindAlign[nCameraIdx] = 1;
|
m_AlignResult.dFindPixelX[nCameraIdx] = int(findAssistantResult.dPosX + 0.5);
|
m_AlignResult.dFindPixelY[nCameraIdx] = int(findAssistantResult.dPosY + 0.5);
|
m_AlignResult.dFindScore[nCameraIdx] = findAssistantResult.dMatchValue;
|
}
|
|
else
|
{
|
int nDiffMainToAssistantX = int(findResult.dPosX) - int(findAssistantResult.dPosX);
int nDiffMainToAssistantY = int(findResult.dPosY) - int(findAssistantResult.dPosY);
g_pLog->DisplayMessage(_T("Main, Assistant TemplateMatching All Low Score! differenceX = %d, differenceY = %d"), nDiffMainToAssistantX, nDiffMainToAssistantY);

if (abs(nDiffMainToAssistantX) <= findParam.dMatchingPixelStandard && abs(nDiffMainToAssistantY) <= findParam.dMatchingPixelStandard && findResult.dMatchValue != 0.0)
|
{
|
m_AlignResult.nFindAlign[nCameraIdx] = 1;
|
m_AlignResult.dFindPixelX[nCameraIdx] = int(findResult.dPosX + 0.5);
|
m_AlignResult.dFindPixelY[nCameraIdx] = int(findResult.dPosY + 0.5);
|
m_AlignResult.dFindScore[nCameraIdx] = findResult.dMatchValue;
|
m_pACC2P->IACC2P_SetAccumaulate(m_pACC2P->IACC2P_GetAccumaulate() + 1);
|
g_pLog->DisplayMessage(_T("Main, Assistant Templete Find Same Point! Align Success! Accumaulate = %d"), m_pACC2P->IACC2P_GetAccumaulate());
|
}
|
|
else
|
{
|
g_pLog->DisplayMessage(_T("Main Assistant Templete Don`t Find Same Point! Align Fail!"));
|
m_AlignResult.nFindAlign[nCameraIdx] = 0;
|
m_AlignResult.dFindScore[nCameraIdx] = findResult.dMatchValue;
|
}
|
|
if (m_pACC2P->IACC2P_GetAccumaulate() > findParam.dMatchingAlarmCondition)
|
{
|
m_AlignResult.nFindAlign[nCameraIdx] = 0;
|
m_AlignResult.dFindScore[nCameraIdx] = findResult.dMatchValue;
|
m_pACC2P->IACC2P_SetAccumaulate(0);
|
g_pLog->DisplayMessage(_T("Low Matching, But Align Same Point, Count is more Than MatchingAlarmCondition , Accumaulate = %d Reset"), m_pACC2P->IACC2P_GetAccumaulate());
|
}
|
}
|
}
|
/*< LYW 20211013 - #3671 ADD End >*/
|
|
/*< LYW 20211013 - #3671 Delete Start >*/
|
// else
|
// {
|
// m_AlignResult.nFindAlign[nCameraIdx] = 0;
|
// m_AlignResult.dFindScore[nCameraIdx] = findResult.dMatchValue;
|
// }
|
/*< LYW 20211013 - #3671 Delete End >*/
|
|
// save threshold image
|
strPath.Format(_T("%s\\Cam%02d_AlignTH_%04d%02d%02d%02d%02d%02d.jpg"), m_strSaveImageBasePath, nCameraIdx,
|
time.GetYear(), time.GetMonth(), time.GetDay(), time.GetHour(), time.GetMinute(), time.GetSecond());
|
pAlignFinder->SaveThresholdImage(strPath);
|
|
// save result image 20210208 (store the align result image)
|
strPath.Format(_T("%s\\Cam%02d_AlignResult_%04d%02d%02d%02d%02d%02d.jpg"), m_strSaveImageBasePath, nCameraIdx,
|
time.GetYear(), time.GetMonth(), time.GetDay(), time.GetHour(), time.GetMinute(), time.GetSecond());
|
pAlignFinder->SaveResultImage(strPath);
|
|
Sleep(10);
|
if (nCameraIdx == 0)
|
{
|
m_AlignResult.strAlignFirest = strPath;
|
}
|
else
|
{
|
m_AlignResult.strAlignSecond = strPath;
|
}
|
|
|
// update matching image
|
pAlignFinder->GetBlobImage(pCameraImage->GetImageData(View_Matching));
|
|
// update result image
|
pAlignFinder->GetResultImage(pCameraImage->GetImageData(View_Result));
|
|
return TRUE;
|
}
|
|
|
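// FindAlignMark (filename variant): reloads the templates from
// ALIGN_TEMPLATE_PATH using the supplied file names, runs the per-camera mark
// search, and returns the combined CAlignResult.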
CAlignResult CCameraControlAlign::FindAlignMark(const VectorString& vectorFilename)
|
{
|
m_AlignResult.Reset();
|
|
// template image
|
CCHImageData orgImage;
|
BOOL bAllSuccess = TRUE;
|
for (int i=0; i<(int)m_vecCameraImage.size(); i++)
|
{
|
(m_vecCameraImage[i]->GetImageData(View_Template))->ReleaseImage();
|
|
CString strFilename = ALIGN_TEMPLATE_PATH;
|
strFilename += vectorFilename[i];
|
|
// load template
|
if (orgImage.LoadImage(strFilename))
|
{
|
orgImage.GetBandImage(BandTypeGray, m_vecCameraImage[i]->GetImageData(View_Template));
|
}
|
|
// find align mark
|
bAllSuccess = bAllSuccess && FindAlignMark(i);
|
}
|
|
m_AlignResult.nResultCode = m_AlignResult.GetResultCode();
|
|
// if (bAllSuccess==FALSE)
|
// {
|
// m_AlignResult.nResultCode = Align_Fail;
|
// }
|
|
return m_AlignResult;
|
}
|
|
void CCameraControlAlign::IALC2P_DisplayMessage( int nIndex, const TCHAR* lpstrFormat, ... )
|
{
|
|
}
|
|
BOOL CCameraControlAlign::IALC2P_ChangeLightLevel( int nIndex, double dLightLevel )
|
{
|
if (m_pACC2P)
|
{
|
return m_pACC2P->IACC2P_SetLightLevel(nIndex, (int)dLightLevel);
|
}
|
|
// 1029
|
if(m_pVectorAlignLightControl)
|
{
|
if(nIndex >= 0 && nIndex < (int)m_pVectorAlignLightControl->size())
|
{
|
(*m_pVectorAlignLightControl)[nIndex]->SetLightLevel(dLightLevel);
|
return TRUE;
|
}
|
}
|
|
return FALSE;
|
}
|
|
BOOL CCameraControlAlign::IALC2P_GetCurrentLightLevel( int nIndex, double& dLight, double& dLightLevel )
|
{
|
EnterCriticalSection(&m_vecLightData[nIndex]->csProcess);
|
|
dLight = m_vecLightData[nIndex]->dLightValue;
|
dLightLevel = m_vecLightData[nIndex]->nControlLightLevel;
|
|
LeaveCriticalSection(&m_vecLightData[nIndex]->csProcess);
|
|
return TRUE;
|
}
|
|
void CCameraControlAlign::IALC2P_ProcessEnd( int nIndex, int nResultCode, double dLightValue, double dLightLevel )
|
{
|
BOOL bRet = FALSE;
|
// auto light result
|
m_AlignResult.nAutoLightResult[nIndex] = nResultCode;
|
m_AlignResult.nControlLightLevel[nIndex] = (int)dLightLevel;
|
|
// find align mark
|
bRet = FindAlignMark(nIndex);
|
|
// send find result
|
EnterCriticalSection(&m_csAlignFindProcess);
|
|
if (m_pACC2P && m_AlignResult.AlignFindEnd())
|
{
|
// evaluate the align find result
|
m_AlignResult.nResultCode = m_AlignResult.GetResultCode();
|
m_AlignResult.SetFindResult(m_AlignRecipe);
|
|
// send the result to the parent
|
m_pACC2P->IACC2P_AlignResult(m_AlignResult);
|
|
m_AlignResult.Reset();
|
}
|
|
LeaveCriticalSection(&m_csAlignFindProcess);
|
}
|
|
void CCameraControlAlign::IALC2P_CurrentResult( int nIndex, int nCount, int nResultCode, double dLightValue, double dLightLevel )
|
{
|
g_pLog->DisplayMessage(_T("AL Current [%d] Align Camera, Iter: %d, Code: %d, Light: %.3lf, Control Light Level: %d"), nIndex, nCount, nResultCode, dLightValue, (int)dLightLevel);
|
}
|
|
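// SaveAutoTempImage: for each align camera, cuts an nEdgeWidth x nEdgeHeight
// patch centred on the given mark position out of the live camera image and
// saves it through a Save-file dialog as a new template image.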
BOOL CCameraControlAlign::SaveAutoTempImage( int nFirstAlignX, int nFirstAlignY, int nSecondAlignX, int nSecondAlignY )
|
{
|
if(int(m_vecCameraImage.size()) < 2) return FALSE;
|
|
for(int nCnt=0; nCnt < int(m_vecCameraImage.size()); nCnt++)
|
{
|
CCameraImageData* pCameraImage = m_vecCameraImage[nCnt];
|
if(pCameraImage == NULL) continue;
|
|
CCHImageData* pImageData = NULL;
|
pImageData = pCameraImage->GetImageData(View_Camera);
|
if(pImageData == NULL || pImageData->GetWidth() < 1 || pImageData->GetHeight() < 1) continue;
|
|
CString szFilter = _T("BMP(*.bmp)|*.bmp| JPG(*.jpg)|*.jpg| All Files(*.*)|*.*||");
|
|
CString strPath;
|
CFileDialog dlg(FALSE, NULL, NULL, OFN_HIDEREADONLY, szFilter);
|
dlg.m_ofn.lpstrTitle = _T("Save Template Image");
|
|
if(dlg.DoModal() == IDCANCEL) return FALSE;
|
|
int nAlignMarkSizeX = m_AlignRecipe.nEdgeWidth;
|
int nAlignMarkSizeY = m_AlignRecipe.nEdgeHeight;
|
|
CRect rtRect;
|
|
if(nCnt ==0)
|
{
|
rtRect.left = nFirstAlignX - nAlignMarkSizeX/2;
|
rtRect.top = nFirstAlignY - nAlignMarkSizeY/2;
|
}
|
else
|
{
|
rtRect.left = nSecondAlignX - nAlignMarkSizeX/2;
|
rtRect.top = nSecondAlignY - nAlignMarkSizeY/2;
|
}
|
|
CString strFileName;
|
strFileName.Format(_T("%s.bmp"), dlg.GetPathName());
|
|
CCHImageData subImage;
|
|
pImageData->GetSubImage(rtRect.left, rtRect.top, nAlignMarkSizeX, nAlignMarkSizeY, &subImage);
|
|
if (subImage.SaveImage(strFileName)==FALSE) return FALSE;
|
}
|
|
return TRUE;
|
}
|
|
BOOL CCameraControlAlign::SetFrameRate(int nIndex, double dRate )
|
{
|
int nCameraCount = int(m_vecCameraControl.size());
|
if(nCameraCount < 1) return FALSE;
|
|
if(int(m_vecCameraControl.size()) <= nIndex) return FALSE;
|
|
return m_vecCameraControl[nIndex]->SetFrameRate(dRate);
|
}
|
|
BOOL CCameraControlAlign::AlignCameraDisconnectCheck()
|
{
|
if(int(m_vecCameraControl.size()) < 1) return FALSE;
|
|
for (VectorCameraControlIt it=m_vecCameraControl.begin(); it!=m_vecCameraControl.end(); it++)
{
if ((*it)->DisconnectCheck() == FALSE) return FALSE;
}
|
|
return TRUE;
|
}
|