Commit 773f1289 刘韬

1

1 个父辈 cae06c13
正在显示 57 个修改的文件,包含 486 行增加、792 行删除
......@@ -94,6 +94,7 @@
<Compile Include="deviceLibrary\IO\kangnaide\MasterTcpClient.cs" />
<Compile Include="deviceLibrary\IO\AIOBOX\AIOBOXManager.cs" />
<Compile Include="deviceLibrary\IO\IOBase.cs" />
<Compile Include="deviceLibrary\IO\NanjingSDotIO.cs" />
<Compile Include="manager\LedManager.cs" />
<Compile Include="manager\LineWidthManager.cs" />
<Compile Include="manager\ResourceControl.cs" />
......
......@@ -16,6 +16,7 @@ namespace TSA_V.DeviceLibrary
this.SizeX = sizeX;
this.SizeY = sizeY;
this.PenWidth = lineWidth;
this.ShowText = showText;
}
public int PX = 2;
......
......@@ -21,11 +21,15 @@ namespace TSA_V.DeviceLibrary
public static void Init()
{
bool isAIOBox = ConfigAppSettings.GetIntValue(Setting_Init.UseAIOBOX).Equals(1);
if (isAIOBox)
int isAIOBox = ConfigAppSettings.GetIntValue(Setting_Init.UseAIOBOX);
if (isAIOBox==1)
{
instance = new AIOBOXManager();
}
else if (isAIOBox == 2)
{
instance = new NanjingSDotIO();
}
else
{
instance = new KNDManager();
......
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net.Sockets;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using TSA_V.DeviceLibrary;
/// <summary>
/// Nanjing SDot digital-IO module driver speaking Modbus-TCP (port 502).
/// Polls all 128 coils (64 DI followed by 64 DO) on a background thread and
/// raises change events; writes single outputs with function 0x0F.
/// </summary>
class NanjingSDotIO : IOBase
{
    // Modbus unit (slave) id of the IO module.
    byte station = 1;
    // Module IP address; set via ConnectionIP before Open().
    public string IP;
    TcpClient tcpClient = new TcpClient();
    Thread iomonitorThread;
    // Modbus-TCP transaction identifier, advanced once per request.
    ushort seq = 0;
    // Dedicated lock guarding all socket traffic. The previous code locked on
    // tcpClient itself, but Open() replaces that instance, so two threads
    // could hold "the" lock on different objects at once.
    readonly object ioLock = new object();

    /// <summary>
    /// Signature of the DI/DO change events.
    /// </summary>
    /// <param name="changindex">indexes of the IOs that changed</param>
    /// <param name="sta">current state of all IOs</param>
    public delegate void IO_Changed(List<int> changindex, bool[] sta);
    public event IO_Changed DI_Changed_Event;
    public event IO_Changed DO_Changed_Event;
    /// <summary>
    /// Raised when the connection state changes; not raised for manual connects.
    /// </summary>
    public event EventHandler<bool> ConnectionState_Event;

    public NanjingSDotIO()
    {
        DIStartAddress = 0;
        DoStartAddress = 0;
    }

    ~NanjingSDotIO()
    {
        iomonitorrun = false;
        Close();
    }

    // True while the driver should keep its connection/monitor alive.
    bool systemrun = false;

    /// <summary>
    /// Open the Modbus-TCP connection and start the monitor thread.
    /// </summary>
    /// <returns>true when the connection was established within 1 second</returns>
    public bool Open()
    {
        lock (ioLock)
        {
            try
            {
                tcpClient.Dispose();
                tcpClient = new TcpClient
                {
                    ReceiveTimeout = 50,
                    SendTimeout = 50
                };
                var connectTask = tcpClient.ConnectAsync(IP, 502);
                if (connectTask.Wait(1000))
                {
                    systemrun = true;
                    iomonitorrun = true;
                    // Background thread so a live connection cannot keep the
                    // process alive after the UI exits (the old foreground
                    // thread did exactly that).
                    iomonitorThread = new Thread(iomonitor) { IsBackground = true };
                    iomonitorThread.Start();
                    ConnectionState_Event?.Invoke(this, true);
                    return true;
                }
                // Timed out. The old code called EndConnect(Task), but
                // EndConnect expects the IAsyncResult from BeginConnect and
                // throws when handed a Task; Close() is the correct teardown.
                tcpClient.Close();
                return false;
            }
            catch
            {
                // Connection failures are reported via the return value.
                return false;
            }
        }
    }

    /// <summary>
    /// Stop the monitor loop and close the connection.
    /// </summary>
    public void Close()
    {
        try
        {
            systemrun = false;
            iomonitorrun = false;
            if (tcpClient.Connected)
                tcpClient.Close();
        }
        catch { }
    }

    /// <summary>
    /// Current connection state.
    /// </summary>
    public bool IsConn
    {
        get => iomonitorrun && systemrun;
    }

    bool iomonitorrun = false;

    /// <summary>
    /// Monitor loop: polls all IO every ~2 ms. On a socket failure it raises
    /// the disconnect event, then retries Open() once per second for as long
    /// as the system is running.
    /// </summary>
    void iomonitor()
    {
        iomonitorrun = true;
        while (iomonitorrun && systemrun)
        {
            Thread.Sleep(2);
            try
            {
                ReadDO();
            }
            catch (SocketException)
            {
                if (tcpClient != null && tcpClient.Connected)
                    tcpClient.Close();
                ConnectionState_Event?.Invoke(this, false);
                iomonitorrun = false;
            }
        }
        if (systemrun)
        {
            // Auto-reconnect; Open() starts a fresh monitor thread on success.
            do
            {
                Thread.Sleep(1000);
            } while (systemrun && !Open());
        }
    }

    /// <summary>
    /// Write one digital output (Modbus function 0x0F, write multiple coils).
    /// </summary>
    /// <param name="donum">output index; coil address is 64 + donum</param>
    /// <param name="state">true = On, false = Off</param>
    /// <returns>true when the reply echoes this request's transaction id</returns>
    public bool WriteDO(int donum, bool state)
    {
        const byte funCode = 0x0F;               // write multiple coils
        byte startAddress = (byte)(64 + donum);  // DO coils start at address 64
        const byte coilCount = 1;
        var seqhead = BitConverter.GetBytes(seq);
        byte ioState = (byte)(state ? 1 : 0);
        byte[] frame =
        {
            seqhead[0], seqhead[1],  // transaction id
            0x00, 0x00,              // protocol id (always 0 for Modbus)
            0x00, 0x06,              // length, patched below
            station,                 // unit id
            funCode,
            0x00, startAddress,      // start address (hi, lo)
            0x00, coilCount,         // coil count (hi, lo)
            0x01,                    // byte count
            ioState
        };
        frame[5] = (byte)(frame.Length - 6); // bytes following the length field
        lock (ioLock)
        {
            try
            {
                seqadd();
                tcpClient.Client.Send(frame);
                byte[] reply = new byte[100];
                int len = tcpClient.Client.Receive(reply);
                // Old code read reply[0..1] without checking the received
                // length; guard against a short/empty reply.
                return len >= 2 && reply[0] == seqhead[0] && reply[1] == seqhead[1];
            }
            catch
            {
                return false;
            }
        }
    }

    /// <summary>
    /// All DI states.
    /// </summary>
    public bool[] DIstate = new bool[64];
    /// <summary>
    /// All DO states.
    /// </summary>
    public bool[] DOstate = new bool[64];
    bool[] lastDIstate = new bool[64];
    bool[] lastDOstate = new bool[64];

    /// <summary>
    /// Read all 128 coils (64 DI + 64 DO, Modbus function 0x01) and raise
    /// change events for any bits that toggled since the last poll.
    /// SocketExceptions propagate to the caller (the monitor loop handles them).
    /// </summary>
    void ReadDO()
    {
        const byte funCode = 0x01;   // read coils
        const byte startAddress = 0;
        const byte coilCount = 128;  // 64 DI followed by 64 DO
        var seqhead = BitConverter.GetBytes(seq);
        byte[] frame =
        {
            seqhead[0], seqhead[1],  // transaction id
            0x00, 0x00,              // protocol id
            0x00, 0x06,              // length, patched below
            station,
            funCode,
            0x00, startAddress,      // start address
            0x00, coilCount          // coil count
        };
        frame[5] = (byte)(frame.Length - 6);
        byte[] reply = new byte[100];
        int len;
        lock (ioLock)
        {
            seqadd();
            tcpClient.Client.Send(frame);
            len = tcpClient.Client.Receive(reply);
        }
        // Reply layout: MBAP header (7) + byte count (1) + 8 DI bytes + 8 DO bytes.
        const int diIndex = 9;
        const int doIndex = 17;
        if (len < doIndex + 8 || reply[0] != seqhead[0] || reply[1] != seqhead[1])
            return; // stale or foreign reply — keep the previous states
        // Unpack 64 DI bits and 64 DO bits, LSB-first within each byte.
        // The old loop shifted a single byte by up to 31 bits, so only the
        // first 16 inputs/outputs were ever decoded; this decodes all 64.
        for (int i = 0; i < 64; i++)
        {
            DIstate[i] = ((reply[diIndex + i / 8] >> (i % 8)) & 0x01) != 0;
            DOstate[i] = ((reply[doIndex + i / 8] >> (i % 8)) & 0x01) != 0;
        }
        // Collect every change first so each event fires once with the full
        // list (the old code re-created the lists per bit and fired per bit).
        List<int> diChanged = new List<int>();
        List<int> doChanged = new List<int>();
        for (int i = 0; i < 64; i++)
        {
            if (lastDIstate[i] != DIstate[i])
            {
                diChanged.Add(i);
                lastDIstate[i] = DIstate[i];
            }
            if (lastDOstate[i] != DOstate[i])
            {
                doChanged.Add(i);
                lastDOstate[i] = DOstate[i];
            }
        }
        if (diChanged.Count > 0)
            Task.Run(() => DI_Changed_Event?.Invoke(diChanged, DIstate));
        if (doChanged.Count > 0)
            // The old code passed the DI change list to the DO event by mistake.
            Task.Run(() => DO_Changed_Event?.Invoke(doChanged, DOstate));
    }

    // Advance the transaction id, wrapping before ushort overflow.
    void seqadd()
    {
        seq++;
        if (seq >= ushort.MaxValue - 10)
            seq = 0;
    }

    // Device-name based connection is not applicable to this TCP module.
    public override void ConnectionKND(List<string> dIODeviceNameList)
    {
    }

    public override void ReadAllDI(string deviceName, byte slaveId)
    {
        // One poll refreshes both DI and DO.
        ReadDO();
    }

    public override void ReadAllDO(string deviceName, byte slaveId)
    {
        ReadDO();
    }

    public override void ConnectionIP(string kNDIP, ushort kNDPort)
    {
        // NOTE: kNDPort is ignored — the module always listens on 502.
        this.IP = kNDIP;
        // Open() starts the monitor thread itself; the old code additionally
        // created a second Thread here and never started it (dead code).
        Open();
    }

    public override void WriteSingleDO(string deviceName, byte slaveId, ushort index, IO_VALUE value, int time)
    {
        // The 'time' pulse parameter is not supported by this module.
        WriteDO(index, value == IO_VALUE.HIGH);
    }

    public override bool IsConnection(string kNDIP)
    {
        return IsConn;
    }

    public override void WriteSingleDO(string deviceName, byte slaveId, ushort index, IO_VALUE value)
    {
        WriteDO(index, value == IO_VALUE.HIGH);
    }

    public override IO_VALUE GetDIValue(string deviceName, byte slaveID, ushort v)
    {
        return DIstate[v] ? IO_VALUE.HIGH : IO_VALUE.LOW;
    }

    public override IO_VALUE GetDOValue(string deviceName, byte slaveID, ushort v)
    {
        return DOstate[v] ? IO_VALUE.HIGH : IO_VALUE.LOW;
    }

    public override IO_VALUE GetIOValue(ConfigIO configIO)
    {
        if (configIO.ProType.Equals(ConfigItemType.DI))
        {
            return DIstate[configIO.IOIndex] ? IO_VALUE.HIGH : IO_VALUE.LOW;
        }
        else
        {
            return DOstate[configIO.IOIndex] ? IO_VALUE.HIGH : IO_VALUE.LOW;
        }
    }

    public override void CloseAllDO()
    {
        // Only the first 32 outputs are switched off, matching the original
        // behavior — presumably the module exposes 32 writable coils; confirm
        // against the hardware manual before widening to 64.
        for (int i = 0; i < 32; i++)
        {
            WriteDO(i, false);
        }
    }

    public override void CloseAllConnection()
    {
        Close();
    }
}
......@@ -95,6 +95,8 @@ namespace TSA_V.DeviceLibrary
while (true)
{
Thread.Sleep(200);
if (!IOManager.ShuddenOK())
break;
if (IsStop)
{
result = ResourceControl.GetString(ResourceControl.UserStop, "用户中止");
......
......@@ -281,6 +281,8 @@ namespace TSA_V.DeviceLibrary
{
foreach (NodeInfo node in map.Values)
{
if (!IOManager.ShuddenOK())
return;
if (!PUSICANControl.IsHomeEnd(node.NodeId))
{
timeOutMsg = ResourceControl.GetString(ResourceControl.RNodeGoHome, " 旋转轴[{0}]原点返回完成", node.NodeId);
......
......@@ -179,7 +179,10 @@ namespace TSA_V.DeviceLibrary
public static bool GetWaitResult(List<WaitResultInfo> waitList, TimeSpan span,bool oneOkCanEnd, out string waitMsg)
{
waitMsg = "";
if (!IOManager.ShuddenOK())
return false;
try
{
if (waitList.Count <= 0)
......
......@@ -265,7 +265,7 @@ namespace TSA_V.DeviceLibrary
if (TSAVBean.IsValidPosition(currPoint.NodePositionX, currPoint.NodePositionY))
{
//LogUtil.info(" 程序【" + currBoard.boardName + "】插件【" + currPoint.pointName + "】,X轴【" + currPoint.NodePositionX + "】,Y轴【" + currPoint.NodePositionY + "】");
TSAVBean.ShowPoint(currPoint.NodePositionX, currPoint.NodePositionY,currPoint.PointType,currPoint.PointSizeX,currPoint.PointSizeY,currPoint.PenWidth,currPoint.PN);
TSAVBean.ShowPoint(currPoint.NodePositionX, currPoint.NodePositionY,currPoint.PointType,currPoint.PointSizeX,currPoint.PointSizeY,currPoint.PenWidth,currPoint.ShowText);
waitList.Add(WaitResultInfo.WaitTime(1000));
}
else
......
......@@ -853,24 +853,31 @@ namespace PUSICANLibrary
/// </summary>
public static bool IsHomeEnd(uint nodeId)
{
//判断忙碌状态=0并且外部停止2=1
int value = 0;
//获取状态
if (PUSIResult.RET_SUCCESS == PUSICANControl.ReadSDO(nodeId, CAN_Address.ControlStatus, out value))
try
{
string busyStr = Convert.ToString(value, 2).PadLeft(5, '0').Substring(1, 1);
string stop2Str = Convert.ToString(value, 2).PadLeft(5, '0').Substring(3, 1);
string stop1Str = Convert.ToString(value, 2).PadLeft(5, '0').Substring(4, 1);
if (busyStr.Equals("0") && (stop2Str.Equals("1") || stop1Str.Equals("1")))
//判断忙碌状态=0并且外部停止2=1
int value = 0;
//获取状态
if (PUSIResult.RET_SUCCESS == PUSICANControl.ReadSDO(nodeId, CAN_Address.ControlStatus, out value))
{
return true ;
string busyStr = Convert.ToString(value, 2).PadLeft(5, '0').Substring(1, 1);
string stop2Str = Convert.ToString(value, 2).PadLeft(5, '0').Substring(3, 1);
string stop1Str = Convert.ToString(value, 2).PadLeft(5, '0').Substring(4, 1);
if (busyStr.Equals("0") && (stop2Str.Equals("1") || stop1Str.Equals("1")))
{
return true;
}
else
{
return false;
}
}
else
{
return false ;
}
}
return false ;
return false;
}
catch (Exception e) {
LogUtil.error("IsHomeEnd:" + e.ToString());
}
return false;
}
public static void DefatutPosMove(uint node, int targetDefaultPosition, int movetype = 0,bool isLineSlv=false )
{
......
......@@ -18,11 +18,11 @@
<DebugType>full</DebugType>
<Optimize>false</Optimize>
<OutputPath>bin\Debug\</OutputPath>
<DefineConstants>TRACE;DEBUG;x86</DefineConstants>
<DefineConstants>TRACE;DEBUG;x64</DefineConstants>
<ErrorReport>prompt</ErrorReport>
<WarningLevel>4</WarningLevel>
<Prefer32Bit>false</Prefer32Bit>
<PlatformTarget>x86</PlatformTarget>
<PlatformTarget>AnyCPU</PlatformTarget>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|AnyCPU' ">
<DebugType>pdbonly</DebugType>
......
......@@ -21,7 +21,8 @@ namespace TSA_V
//cmbAoiFile.Items.Clear();
FileList = new List<string>();
if (cmbAoiFile!=null)
FileList.Add(ResourceCulture.GetString("不使用AOI"));
string appPath = Application.StartupPath;
string pathName = ConfigAppSettings.GetValue(Setting_Init.AOIFileConfig);
string filePath = appPath + pathName;
......@@ -47,7 +48,7 @@ namespace TSA_V
if (String.IsNullOrEmpty(DefaultName).Equals(false)&& FileList.Contains(DefaultName))
{
FileList.Remove(DefaultName);
FileList.Insert(0, DefaultName);
FileList.Insert(1, DefaultName);
}
if (cmbAoiFile != null)
{
......
......@@ -109,11 +109,11 @@
<!--组装工作前,需要输入pcb板条码-->
<add key="NeedPCBCode" value="0" />
<!--是否调整气缸顺序,配置1时,侧挡气缸前进,底部气缸在上升-->
<add key ="SideCylinderMoveFirst" value ="1"/>
<add key="SideCylinderMoveFirst" value="1" />
<!--是否启用工作区信号触发开始工作-->
<add key ="WorkSingleStart" value ="0"/>
<add key="WorkSingleStart" value="0" />
<!--点位描述显示类型-->
<add key ="PointDisplayType" value="1"/>
<add key="PointDisplayType" value="1" />
</appSettings>
<log4net>
<appender name="defaultAppender" type="log4net.Appender.RollingFileAppender">
......@@ -146,7 +146,7 @@
</logger>
</log4net>
<startup>
<supportedRuntime version="v4.0" sku=".NETFramework,Version=v4.6.1" />
<supportedRuntime version="v4.0" sku=".NETFramework,Version=v4.7.2" />
</startup>
<runtime>
<assemblyBinding xmlns="urn:schemas-microsoft-com:asm.v1">
......@@ -478,4 +478,4 @@
</dependentAssembly>
</assemblyBinding>
</runtime>
</configuration>
\ No newline at end of file
</configuration>
......@@ -318,10 +318,8 @@
//
this.groupBox2.Anchor = ((System.Windows.Forms.AnchorStyles)(((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Left)
| System.Windows.Forms.AnchorStyles.Right)));
this.groupBox2.Controls.Add(this.lblLineW);
this.groupBox2.Controls.Add(this.btnConfigAOI);
this.groupBox2.Controls.Add(this.label8);
this.groupBox2.Controls.Add(this.txtLineWidth);
this.groupBox2.Controls.Add(this.lbllinews);
this.groupBox2.Controls.Add(this.label122);
this.groupBox2.Controls.Add(this.btnUpdate);
this.groupBox2.Controls.Add(this.label9);
......@@ -344,9 +342,9 @@
// lblLineW
//
this.lblLineW.Font = new System.Drawing.Font("微软雅黑", 9F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((byte)(134)));
this.lblLineW.Location = new System.Drawing.Point(489, 25);
this.lblLineW.Location = new System.Drawing.Point(367, 63);
this.lblLineW.Name = "lblLineW";
this.lblLineW.Size = new System.Drawing.Size(80, 20);
this.lblLineW.Size = new System.Drawing.Size(112, 20);
this.lblLineW.TabIndex = 297;
this.lblLineW.Text = "线体宽度:";
this.lblLineW.TextAlign = System.Drawing.ContentAlignment.MiddleRight;
......@@ -365,7 +363,7 @@
// txtLineWidth
//
this.txtLineWidth.Font = new System.Drawing.Font("微软雅黑", 12F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((byte)(134)));
this.txtLineWidth.Location = new System.Drawing.Point(573, 21);
this.txtLineWidth.Location = new System.Drawing.Point(483, 59);
this.txtLineWidth.MaxLength = 8;
this.txtLineWidth.Name = "txtLineWidth";
this.txtLineWidth.Size = new System.Drawing.Size(70, 29);
......@@ -375,7 +373,7 @@
//
this.lbllinews.AutoSize = true;
this.lbllinews.Font = new System.Drawing.Font("微软雅黑", 9F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((byte)(134)));
this.lbllinews.Location = new System.Drawing.Point(646, 27);
this.lbllinews.Location = new System.Drawing.Point(556, 65);
this.lbllinews.Name = "lbllinews";
this.lbllinews.Size = new System.Drawing.Size(30, 17);
this.lbllinews.TabIndex = 296;
......@@ -881,10 +879,12 @@
//
// groupBox1
//
this.groupBox1.Controls.Add(this.btnConfigAOI);
this.groupBox1.Controls.Add(this.lblAoi);
this.groupBox1.Controls.Add(this.lblLineW);
this.groupBox1.Controls.Add(this.cmbAOIFile);
this.groupBox1.Controls.Add(this.txtCode);
this.groupBox1.Controls.Add(this.txtLineWidth);
this.groupBox1.Controls.Add(this.lbllinews);
this.groupBox1.Controls.Add(this.txtBoardName);
this.groupBox1.Controls.Add(this.label14);
this.groupBox1.Controls.Add(this.label2);
......@@ -900,7 +900,7 @@
//
this.btnConfigAOI.FlatStyle = System.Windows.Forms.FlatStyle.Flat;
this.btnConfigAOI.Font = new System.Drawing.Font("微软雅黑", 10.5F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((byte)(134)));
this.btnConfigAOI.Location = new System.Drawing.Point(454, 58);
this.btnConfigAOI.Location = new System.Drawing.Point(-47, 15);
this.btnConfigAOI.Name = "btnConfigAOI";
this.btnConfigAOI.Size = new System.Drawing.Size(100, 30);
this.btnConfigAOI.TabIndex = 284;
......
......@@ -554,8 +554,10 @@ namespace TSA_V
board.LineWidth = FormUtil.GetIntValue(txtLineWidth);
board.boardCode = FormUtil.getValue(txtCode);
board.orgType = orgType;
board.AOIProName = cmbAOIFile.Text;
if (cmbAOIFile.SelectedIndex == 0)
board.AOIProName = "";
else
board.AOIProName = cmbAOIFile.Text;
if (board.boardName.Equals(""))
{
MessageBox.Show(ResourceCulture.GetString(ResourceCulture.WritePName, "请输入程序名称"));
......@@ -1030,7 +1032,7 @@ namespace TSA_V
{
if (e.Button == MouseButtons.Right)
{
if (e.RowIndex >= 0)
if (e.RowIndex >= 0 && e.ColumnIndex>=0)
{
this.dgvList.ClearSelection();
dgvList.Rows[e.RowIndex].Selected = true;
......
......@@ -323,7 +323,7 @@ namespace TSA_V
}
if (BoardManager.getBoardByName(board.boardName) != null)
{
MessageBox.Show(ResourceCulture.GetString("导入失败:程序名已存在"), ResourceCulture.GetString("提示"), MessageBoxButtons.OK, MessageBoxIcon.Error);
MessageBox.Show(ResourceCulture.GetString("导入失败:程序名已存在")+"["+ board.boardName + "]", ResourceCulture.GetString("提示"), MessageBoxButtons.OK, MessageBoxIcon.Error);
return;
}
BoardManager.ImportBoard(board);
......
......@@ -95,7 +95,7 @@ namespace TSA_V
this.btnWork.FlatStyle = System.Windows.Forms.FlatStyle.Flat;
this.btnWork.Font = new System.Drawing.Font("微软雅黑", 21.75F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((byte)(134)));
this.btnWork.ForeColor = System.Drawing.SystemColors.ButtonHighlight;
this.btnWork.Location = new System.Drawing.Point(87, 107);
this.btnWork.Location = new System.Drawing.Point(119, 116);
this.btnWork.Name = "btnWork";
this.btnWork.Size = new System.Drawing.Size(261, 312);
this.btnWork.TabIndex = 0;
......@@ -159,7 +159,7 @@ namespace TSA_V
this.btnHistory.FlatStyle = System.Windows.Forms.FlatStyle.Flat;
this.btnHistory.Font = new System.Drawing.Font("微软雅黑", 15F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((byte)(134)));
this.btnHistory.ForeColor = System.Drawing.Color.White;
this.btnHistory.Location = new System.Drawing.Point(354, 319);
this.btnHistory.Location = new System.Drawing.Point(386, 328);
this.btnHistory.Name = "btnHistory";
this.btnHistory.Size = new System.Drawing.Size(180, 100);
this.btnHistory.TabIndex = 20;
......@@ -254,7 +254,7 @@ namespace TSA_V
this.btnReplenish.FlatStyle = System.Windows.Forms.FlatStyle.Flat;
this.btnReplenish.Font = new System.Drawing.Font("微软雅黑", 12F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((byte)(134)));
this.btnReplenish.ForeColor = System.Drawing.Color.White;
this.btnReplenish.Location = new System.Drawing.Point(540, 213);
this.btnReplenish.Location = new System.Drawing.Point(572, 222);
this.btnReplenish.Name = "btnReplenish";
this.btnReplenish.Size = new System.Drawing.Size(180, 100);
this.btnReplenish.TabIndex = 8;
......@@ -270,7 +270,7 @@ namespace TSA_V
this.btnMaintenance.FlatStyle = System.Windows.Forms.FlatStyle.Flat;
this.btnMaintenance.Font = new System.Drawing.Font("微软雅黑", 12F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((byte)(134)));
this.btnMaintenance.ForeColor = System.Drawing.Color.White;
this.btnMaintenance.Location = new System.Drawing.Point(540, 107);
this.btnMaintenance.Location = new System.Drawing.Point(572, 116);
this.btnMaintenance.Name = "btnMaintenance";
this.btnMaintenance.Size = new System.Drawing.Size(180, 100);
this.btnMaintenance.TabIndex = 8;
......@@ -286,7 +286,7 @@ namespace TSA_V
this.btnCom.FlatStyle = System.Windows.Forms.FlatStyle.Flat;
this.btnCom.Font = new System.Drawing.Font("微软雅黑", 12F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((byte)(134)));
this.btnCom.ForeColor = System.Drawing.Color.White;
this.btnCom.Location = new System.Drawing.Point(540, 319);
this.btnCom.Location = new System.Drawing.Point(572, 328);
this.btnCom.Name = "btnCom";
this.btnCom.Size = new System.Drawing.Size(180, 100);
this.btnCom.TabIndex = 5;
......@@ -302,7 +302,7 @@ namespace TSA_V
this.btnAOI.FlatStyle = System.Windows.Forms.FlatStyle.Flat;
this.btnAOI.Font = new System.Drawing.Font("微软雅黑", 15F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((byte)(134)));
this.btnAOI.ForeColor = System.Drawing.Color.White;
this.btnAOI.Location = new System.Drawing.Point(354, 213);
this.btnAOI.Location = new System.Drawing.Point(386, 222);
this.btnAOI.Name = "btnAOI";
this.btnAOI.Size = new System.Drawing.Size(180, 100);
this.btnAOI.TabIndex = 1;
......@@ -318,7 +318,7 @@ namespace TSA_V
this.btnProduct.FlatStyle = System.Windows.Forms.FlatStyle.Flat;
this.btnProduct.Font = new System.Drawing.Font("微软雅黑", 15F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((byte)(134)));
this.btnProduct.ForeColor = System.Drawing.Color.White;
this.btnProduct.Location = new System.Drawing.Point(354, 107);
this.btnProduct.Location = new System.Drawing.Point(386, 116);
this.btnProduct.Name = "btnProduct";
this.btnProduct.Size = new System.Drawing.Size(180, 100);
this.btnProduct.TabIndex = 1;
......
......@@ -74,10 +74,10 @@ namespace TSA_V
// MessageBox.Show(ResourceCulture.GetString( "清先等待预热结束"));
// return;
//}
isClick = true;
isClick = true;
FrmBoardSelect fw = new FrmBoardSelect();
this.Visible = false; ;
this.Visible = false;
fw.ShowDialog();
this.Visible = true;
}
......
......@@ -2542,4 +2542,10 @@
<data name="DeviceInGohome" xml:space="preserve">
<value>11:34Backing to origin.</value>
</data>
<data name="不使用AOI" xml:space="preserve">
<value>Do not use AOI</value>
</data>
<data name="加载AOI失败." xml:space="preserve">
<value>Load AOI profile fail.</value>
</data>
</root>
\ No newline at end of file
......@@ -12,7 +12,7 @@ namespace TSA_V.Properties {
[global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
[global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.VisualStudio.Editors.SettingsDesigner.SettingsSingleFileGenerator", "15.6.0.0")]
[global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.VisualStudio.Editors.SettingsDesigner.SettingsSingleFileGenerator", "16.10.0.0")]
internal sealed partial class Settings : global::System.Configuration.ApplicationSettingsBase {
private static Settings defaultInstance = ((Settings)(global::System.Configuration.ApplicationSettingsBase.Synchronized(new Settings())));
......
<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="12.0" DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<Import Project="..\packages\Microsoft.ML.0.11.0\build\netstandard2.0\Microsoft.ML.props" Condition="Exists('..\packages\Microsoft.ML.0.11.0\build\netstandard2.0\Microsoft.ML.props')" />
<Import Project="..\packages\Microsoft.ML.CpuMath.0.11.0\build\netstandard2.0\Microsoft.ML.CpuMath.props" Condition="Exists('..\packages\Microsoft.ML.CpuMath.0.11.0\build\netstandard2.0\Microsoft.ML.CpuMath.props')" />
<Import Project="$(MSBuildExtensionsPath)\$(MSBuildToolsVersion)\Microsoft.Common.props" Condition="Exists('$(MSBuildExtensionsPath)\$(MSBuildToolsVersion)\Microsoft.Common.props')" />
<PropertyGroup>
......@@ -11,14 +10,14 @@
<AppDesignerFolder>Properties</AppDesignerFolder>
<RootNamespace>TSA_V</RootNamespace>
<AssemblyName>Neo Station</AssemblyName>
<TargetFrameworkVersion>v4.6.1</TargetFrameworkVersion>
<TargetFrameworkVersion>v4.7.2</TargetFrameworkVersion>
<FileAlignment>512</FileAlignment>
<TargetFrameworkProfile />
<NuGetPackageImportStamp>
</NuGetPackageImportStamp>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|AnyCPU' ">
<PlatformTarget>x86</PlatformTarget>
<PlatformTarget>AnyCPU</PlatformTarget>
<DebugSymbols>true</DebugSymbols>
<DebugType>full</DebugType>
<Optimize>false</Optimize>
......@@ -629,14 +628,12 @@
<ErrorText>这台计算机上缺少此项目引用的 NuGet 程序包。使用“NuGet 程序包还原”可下载这些程序包。有关更多信息,请参见 http://go.microsoft.com/fwlink/?LinkID=322105。缺少的文件是 {0}。</ErrorText>
</PropertyGroup>
<Error Condition="!Exists('..\packages\Microsoft.ML.CpuMath.0.11.0\build\netstandard2.0\Microsoft.ML.CpuMath.props')" Text="$([System.String]::Format('$(ErrorText)', '..\packages\Microsoft.ML.CpuMath.0.11.0\build\netstandard2.0\Microsoft.ML.CpuMath.props'))" />
<Error Condition="!Exists('..\packages\Microsoft.ML.0.11.0\build\netstandard2.0\Microsoft.ML.props')" Text="$([System.String]::Format('$(ErrorText)', '..\packages\Microsoft.ML.0.11.0\build\netstandard2.0\Microsoft.ML.props'))" />
<Error Condition="!Exists('..\packages\Microsoft.ML.0.11.0\build\netstandard2.0\Microsoft.ML.targets')" Text="$([System.String]::Format('$(ErrorText)', '..\packages\Microsoft.ML.0.11.0\build\netstandard2.0\Microsoft.ML.targets'))" />
<Error Condition="!Exists('..\packages\Stub.System.Data.SQLite.Core.NetFramework.1.0.113.3\build\net46\Stub.System.Data.SQLite.Core.NetFramework.targets')" Text="$([System.String]::Format('$(ErrorText)', '..\packages\Stub.System.Data.SQLite.Core.NetFramework.1.0.113.3\build\net46\Stub.System.Data.SQLite.Core.NetFramework.targets'))" />
</Target>
<Import Project="..\packages\Microsoft.ML.0.11.0\build\netstandard2.0\Microsoft.ML.targets" Condition="Exists('..\packages\Microsoft.ML.0.11.0\build\netstandard2.0\Microsoft.ML.targets')" />
<Import Project="..\packages\Stub.System.Data.SQLite.Core.NetFramework.1.0.113.3\build\net46\Stub.System.Data.SQLite.Core.NetFramework.targets" Condition="Exists('..\packages\Stub.System.Data.SQLite.Core.NetFramework.1.0.113.3\build\net46\Stub.System.Data.SQLite.Core.NetFramework.targets')" />
<PropertyGroup>
<PostBuildEvent>"C:\Program Files (x86)\Microsoft Visual Studio\2019\Professional\VC\Tools\MSVC\14.29.30133\bin\Hostx64\x86\editbin" /largeaddressaware "$(TargetPath)"</PostBuildEvent>
<PostBuildEvent>echo "C:\Program Files (x86)\Microsoft Visual Studio\2019\Professional\VC\Tools\MSVC\14.29.30133\bin\Hostx64\x86\editbin" /largeaddressaware "$(TargetPath)"
start $(TargetDir)</PostBuildEvent>
</PropertyGroup>
<!-- To modify your build process, add your task inside one of the targets below and uncomment it.
Other similar extension points exist, see Microsoft.Common.targets.
......
<?xml version="1.0" encoding="utf-8"?>
<packages>
<package id="Microsoft.Data.DataView" version="0.11.0" targetFramework="net461" />
<package id="Microsoft.ML" version="0.11.0" targetFramework="net461" />
<package id="Microsoft.ML.CpuMath" version="0.11.0" targetFramework="net461" />
<package id="Newtonsoft.Json" version="12.0.1" targetFramework="net461" />
<package id="Stub.System.Data.SQLite.Core.NetFramework" version="1.0.113.3" targetFramework="net461" />
......
......@@ -35,7 +35,7 @@ namespace TSA_V
this.Text = "显示测试_" + ScreenIndex;
this.StartPosition = FormStartPosition.Manual;
}
public bool CloseShowName=true;
public bool CloseShowName=false;
public bool ShowForm()
{
Screen[] sc = Screen.AllScreens;
......@@ -280,6 +280,7 @@ namespace TSA_V
if (!name.Equals(""))
{
int size = (sizeX + sizeY) / 2;
size = 30;
if (!CloseShowName)
{
g.DrawString(name, new Font("Arial ", size, FontStyle.Regular), Brushes.White, x - size + 5, y + size + 2);
......
......@@ -315,11 +315,11 @@ namespace TSA_V
chbOffLine.Checked = false;
chbOffLine.Visible = false;
}
//else
//{
// chbWorkSingleStart.Visible = true;
// chbOffLine.Visible = true;
//}
else
{
chbWorkSingleStart.Visible = true;
chbOffLine.Visible = true;
}
}
private void chbOffLine_CheckedChanged(object sender, EventArgs e)
......
......@@ -67,14 +67,17 @@ namespace TSA_V
else
{
LogUtil.info("配置程序之后才能工作!");
this.Close();
}
if (TSAVBean.IsNeedAOI)
if (TSAVBean.IsNeedAOI && !string.IsNullOrEmpty(board.AOIProName))
{
LoadAoi();
if (CurrProject == null)
{
this.Close();
LogUtil.info("没有读取到AOI,结束,IsNeedAOI=true!");
MessageBox.Show(ResourceCulture.GetString("加载AOI失败."));
//this.Close();
}
}
if (!IsSet)
......@@ -526,14 +529,15 @@ namespace TSA_V
lblPartNum.Text = smtPoint.TagNo;
lblPointName.Text = smtPoint.PN;
ComponetInfo com = CSVBomManager.GetCom(BoardManager.CurrBoard.bomName, smtPoint);
TSAVPosition position = CSVPositionReader<TSAVPosition>.GetPositonByNum(com.PositionNum);
if (position != null)
{
lblPositionNum.Text = position.PositionNum;
}
if (com != null)
{
TSAVPosition position = CSVPositionReader<TSAVPosition>.GetPositonByNum(com.PositionNum);
if (position != null)
{
lblPositionNum.Text = position.PositionNum;
}
lblComDes.Text = com.ComponentDes;
this.lblComName.Text = com.PN;
lblCount.Text = com.ComCount.ToString();
......
MIT License
Copyright (c) 2018 .NET Foundation
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
ML.NET uses third-party libraries or other resources that may be
distributed under licenses different than the ML.NET software.
In the event that we accidentally failed to list a required notice, please
bring it to our attention. Post an issue or email us:
dotnet@microsoft.com
The attached notices are provided for information only.
License notice for LIBMF
------------------------
https://github.com/cjlin1/libmf
Copyright (c) 2014-2015 The LIBMF Project.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. Neither name of copyright holders nor the names of its contributors
may be used to endorse or promote products derived from this software
without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
\ No newline at end of file
<Project xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
  <!--
    NuGet packages.config doesn't support native assemblies automatically,
    so copy the native assemblies to the output directory.
  -->
  <!-- Only applies to packages.config-based projects; PackageReference projects
       get native assets via the runtimes/ folder convention automatically. -->
  <ItemGroup Condition="Exists('packages.config') OR
                        Exists('$(MSBuildProjectName).packages.config') OR
                        Exists('packages.$(MSBuildProjectName).config')">
    <!-- Pick the native DLL set matching the project's PlatformTarget (x64 vs x86). -->
    <Content Include="$(MSBuildThisFileDirectory)\..\..\runtimes\win-x64\native\*.dll"
             Condition="'$(PlatformTarget)' == 'x64'">
      <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
      <Visible>false</Visible>
      <Link>%(Filename)%(Extension)</Link>
    </Content>
    <Content Include="$(MSBuildThisFileDirectory)\..\..\runtimes\win-x86\native\*.dll"
             Condition="'$(PlatformTarget)' == 'x86'">
      <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
      <Visible>false</Visible>
      <Link>%(Filename)%(Extension)</Link>
    </Content>
  </ItemGroup>
</Project>
\ No newline at end of file
<Project xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
  <PropertyGroup>
    <!-- Opt-out switch: consumers can set this property to 'false' to suppress the check below. -->
    <EnableMLUnsupportedPlatformTargetCheck Condition="'$(EnableMLUnsupportedPlatformTargetCheck)' == ''">true</EnableMLUnsupportedPlatformTargetCheck>
  </PropertyGroup>
  <!-- Fails the build early when an executable targets a processor architecture
       (e.g. AnyCPU/ARM) that the ML.NET native components do not support. -->
  <Target Name="_CheckForUnsupportedPlatformTarget"
          Condition="'$(EnableMLUnsupportedPlatformTargetCheck)' == 'true'"
          AfterTargets="_CheckForInvalidConfigurationAndPlatform">
    <!--
      Special case .NET Core portable applications. When building a portable .NET Core app,
      the PlatformTarget is empty, and you don't know until runtime (i.e. which dotnet.exe)
      what processor architecture will be used.
    -->
    <Error Condition="('$(PlatformTarget)' != 'x64' AND '$(PlatformTarget)' != 'x86') AND
                      ('$(OutputType)' == 'Exe' OR '$(OutputType)'=='WinExe') AND
                      !('$(TargetFrameworkIdentifier)' == '.NETCoreApp' AND '$(PlatformTarget)' == '')"
           Text="Microsoft.ML currently supports 'x64' and 'x86' processor architectures. Please ensure your application is targeting 'x64' or 'x86'." />
  </Target>
</Project>
\ No newline at end of file
<?xml version="1.0"?>
<doc>
<assembly>
<name>Microsoft.ML.KMeansClustering</name>
</assembly>
<members>
<member name="T:Microsoft.ML.KMeansClusteringExtensions">
<summary>
The trainer context extensions for the <see cref="T:Microsoft.ML.Trainers.KMeansPlusPlusTrainer"/>.
</summary>
</member>
<member name="M:Microsoft.ML.KMeansClusteringExtensions.KMeans(Microsoft.ML.ClusteringCatalog.ClusteringTrainers,System.String,System.String,System.Int32)">
<summary>
Train a KMeans++ clustering algorithm.
</summary>
<param name="catalog">The clustering catalog trainer object.</param>
<param name="featureColumnName">The name of the feature column.</param>
<param name="exampleWeightColumnName">The name of the example weight column (optional).</param>
<param name="clustersCount">The number of clusters to use for KMeans.</param>
<example>
<format type="text/markdown">
<![CDATA[
[!code-csharp[KMeans](~/../docs/samples/docs/samples/Microsoft.ML.Samples/Dynamic/KMeans.cs)]
]]></format>
</example>
</member>
<member name="M:Microsoft.ML.KMeansClusteringExtensions.KMeans(Microsoft.ML.ClusteringCatalog.ClusteringTrainers,Microsoft.ML.Trainers.KMeansPlusPlusTrainer.Options)">
<summary>
Train a KMeans++ clustering algorithm.
</summary>
<param name="catalog">The clustering catalog trainer object.</param>
<param name="options">Algorithm advanced options.</param>
</member>
<member name="T:Microsoft.ML.Trainers.KMeansModelParameters">
<example>
<format type="text/markdown">
<![CDATA[
[!code-csharp[KMeans](~/../docs/samples/docs/samples/Microsoft.ML.Samples/Dynamic/KMeans.cs)]
]]></format>
</example>
</member>
<member name="M:Microsoft.ML.Trainers.KMeansModelParameters.GetVersionInfo">
<summary>
Version information to be saved in binary format
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.KMeansModelParameters.#ctor(Microsoft.ML.IHostEnvironment,System.Int32,Microsoft.ML.Data.VBuffer{System.Single}[],System.Boolean)">
<summary>
Initialize predictor with a trained model.
</summary>
<param name="env">The host environment</param>
<param name="k">Number of centroids</param>
<param name="centroids">Coordinates of the centroids</param>
<param name="copyIn">If true then the <paramref name="centroids"/> vectors will be subject to
a deep copy, if false then this constructor will take ownership of the passed in centroid vectors.
If false then the caller must take care to not use or modify the input vectors once this object
is constructed, and should probably remove all references.</param>
</member>
<member name="M:Microsoft.ML.Trainers.KMeansModelParameters.#ctor(Microsoft.ML.IHostEnvironment,Microsoft.ML.ModelLoadContext)">
<summary>
Initialize predictor from a binary file.
</summary>
<param name="ctx">The load context</param>
<param name="env">The host environment</param>
</member>
<member name="M:Microsoft.ML.Trainers.KMeansModelParameters.SaveCore(Microsoft.ML.ModelSaveContext)">
<summary>
Save the predictor in binary format.
</summary>
<param name="ctx">The context to save to</param>
</member>
<member name="M:Microsoft.ML.Trainers.KMeansModelParameters.Create(Microsoft.ML.IHostEnvironment,Microsoft.ML.ModelLoadContext)">
<summary>
This method is called by reflection to instantiate a predictor.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.KMeansModelParameters.InitPredictor">
<summary>
Initialize internal parameters: L2 norms of the _centroids.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.KMeansModelParameters.GetClusterCentroids(Microsoft.ML.Data.VBuffer{System.Single}[]@,System.Int32@)">
<summary>
Copies the centroids to a set of provided buffers.
</summary>
<param name="centroids">The buffer to which to copy. Will be extended to
an appropriate length, if necessary.</param>
<param name="k">The number of clusters, corresponding to the logical size of
<paramref name="centroids"/>.</param>
</member>
<member name="T:Microsoft.ML.Trainers.KMeansPlusPlusTrainer">
<summary>
K-means is a popular clustering algorithm. With K-means, the data is clustered into a specified
number of clusters in order to minimize the within-cluster sum of squares.
</summary><remarks>
K-means++ improves upon K-means by using the <a href="https://research.microsoft.com/apps/pubs/default.aspx?id=252149">Yinyang K-Means</a> method for choosing the initial cluster centers.
YYK-Means accelerates K-Means up to an order of magnitude while producing exactly the same clustering results (modulo floating point precision issues).
YYK-Means observes that there is a lot of redundancy across iterations in the KMeans algorithms and most points do not change their clusters during an iteration.
It uses various bounding techniques to identify this redundancy and eliminate many distance computations and optimize centroid computations.
<para>For more information on K-means, and K-means++ see:</para>
<list type="bullet">
<item><description><a href="https://en.wikipedia.org/wiki/K-means_clustering">K-means</a></description></item>
<item><description><a href="https://en.wikipedia.org/wiki/K-means%2b%2b">K-means++</a></description></item>
</list>
</remarks>
</member>
<member name="F:Microsoft.ML.Trainers.KMeansPlusPlusTrainer.Defaults.ClustersCount">
<value>The number of clusters.</value>
</member>
<member name="F:Microsoft.ML.Trainers.KMeansPlusPlusTrainer.Options.ClustersCount">
<summary>
The number of clusters.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.KMeansPlusPlusTrainer.Options.InitAlgorithm">
<summary>
Cluster initialization algorithm.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.KMeansPlusPlusTrainer.Options.OptimizationTolerance">
<summary>
Tolerance parameter for trainer convergence. Low = slower, more accurate.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.KMeansPlusPlusTrainer.Options.MaxIterations">
<summary>
Maximum number of iterations.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.KMeansPlusPlusTrainer.Options.AccelerationMemoryBudgetMb">
<summary>
Memory budget (in MBs) to use for KMeans acceleration.
</summary>
</member>
<member name="F:Microsoft.ML.Trainers.KMeansPlusPlusTrainer.Options.NumThreads">
<summary>
Degree of lock-free parallelism. Defaults to automatic. Determinism not guaranteed.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.KMeansPlusPlusTrainer.#ctor(Microsoft.ML.IHostEnvironment,Microsoft.ML.Trainers.KMeansPlusPlusTrainer.Options)">
<summary>
Initializes a new instance of <see cref="T:Microsoft.ML.Trainers.KMeansPlusPlusTrainer"/>
</summary>
<param name="env">The <see cref="T:Microsoft.ML.IHostEnvironment"/> to use.</param>
<param name="options">The advanced options of the algorithm.</param>
</member>
<member name="M:Microsoft.ML.Trainers.KMeansPlusPlusInit.Initialize(Microsoft.ML.IHost,System.Int32,Microsoft.ML.IChannel,Microsoft.ML.Trainers.FeatureFloatVectorCursor.Factory,System.Int32,System.Int32,Microsoft.ML.Data.VBuffer{System.Single}[],System.Int64@,System.Int64@,System.Boolean)">
<summary>
Initialize starting centroids via KMeans++ algorithm. This algorithm will always run single-threaded,
regardless of the value of <paramref name="numThreads" />.
</summary>
</member>
<member name="T:Microsoft.ML.Trainers.KMeansAcceleratedRowMap">
<summary>
An instance of this class is used by SharedStates in YinYangTrainer
and KMeansBarBarInitialization. It effectively bounds MaxInstancesToAccelerate and
initializes RowIndexGetter.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.KMeansAcceleratedRowMap.BuildParallelIndexLookup(Microsoft.ML.Trainers.FeatureFloatVectorCursor.Factory)">
<summary>
Initializes the parallel index lookup HashArray using a sequential RowCursor. We
preinitialize the HashArray so we can perform lock-free lookup operations during
the primary KMeans pass.
</summary>
</member>
<member name="T:Microsoft.ML.Trainers.KMeansBarBarInitialization.SharedState">
<summary>
Data for optimizing KMeans|| initialization. Very similar to SharedState class
For every instance, there is a space for the best weight and best cluster computed.
In this class, new clusters mean the clusters that were added to the cluster set
in the previous round of KMeans|| and old clusters are the rest of them (the ones
that were added in the rounds before the previous one).
In every round of KMeans||, numSamplesPerRound new clusters are added to the set of clusters.
There are 'numRounds' number of rounds. We compute and store the distance of each new
cluster from every round to all of the previous clusters and use it
to avoid unnecessary computation by applying the triangle inequality.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.KMeansBarBarInitialization.SharedState.SetInstanceCluster(System.Int32,System.Single,System.Int32)">
<summary>
When assigning an accelerated row to a cluster, we store away the weight
to its closest cluster, as well as the identity of the new
closest cluster. Note that bestWeight can be negative since it is
corresponding to the weight of a distance which does not have
the L2 norm of the point itself.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.KMeansBarBarInitialization.SharedState.SetClusterDistance(System.Int32,Microsoft.ML.Data.VBuffer{System.Single}@,System.Single,System.Int32,Microsoft.ML.Data.VBuffer{System.Single}@,System.Single)">
<summary>
Computes and stores the distance of a new cluster to an old cluster
<paramref name="newClusterFeatures"/> must be between 0..numSamplesPerRound-1.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.KMeansBarBarInitialization.SharedState.CanWeightComputationBeAvoided(System.Single,System.Int32,System.Int32)">
<summary>
This function is the key to use triangle inequality. Given an instance x distance to the best
old cluster, cOld, and distance of a new cluster, cNew, to cOld, this function evaluates whether
the distance computation of dist(x,cNew) can be avoided.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.KMeansBarBarInitialization.FindBestCluster(Microsoft.ML.Data.VBuffer{System.Single}@,System.Int32,Microsoft.ML.Trainers.KMeansBarBarInitialization.SharedState,System.Int32,System.Int32,Microsoft.ML.Data.VBuffer{System.Single}[],System.Single[],System.Boolean,System.Boolean,System.Single@,System.Int32@)">
<summary>
This function finds the best cluster and the best weight for an instance using
smart triangle inequality to avoid unnecessary weight computations.
Note that <paramref name="needToStoreWeight"/> is used to avoid the storing the new cluster in
final round. After the final round, best cluster information will be ignored.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.KMeansBarBarInitialization.ComputeAccelerationMemoryRequirement(System.Int64,System.Int32,System.Int32,System.Boolean,System.Int64@,System.Int64@)">
<summary>
This method computes the memory requirement for _clusterDistances in SharedState (clusterBytes) and
the maximum number of instances whose weight to the closest cluster can be memorized in order to avoid
recomputation later.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.KMeansBarBarInitialization.Initialize(Microsoft.ML.IHost,System.Int32,Microsoft.ML.IChannel,Microsoft.ML.Trainers.FeatureFloatVectorCursor.Factory,System.Int32,System.Int32,Microsoft.ML.Data.VBuffer{System.Single}[],System.Int64,System.Int64@,System.Int64@)">
<summary>
KMeans|| Implementation, see https://theory.stanford.edu/~sergei/papers/vldb12-kmpar.pdf
This algorithm will require:
- (k * overSampleFactor * rounds * dimensionality * 4) bytes for the final sampled clusters.
- (k * overSampleFactor * numThreads * dimensionality * 4) bytes for the per-round sampling.
Uses memory in initializationState to cache distances and avoids unnecessary distance computations
akin to YinYang-KMeans paper.
Everywhere in this function, weight of an instance x from a cluster c means weight(x,c) = dist(x,c)^2-norm(x)^2.
We store weight in most cases to avoid unnecessary computation of norm(x).
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.KMeansRandomInit.Initialize(Microsoft.ML.IHost,System.Int32,Microsoft.ML.IChannel,Microsoft.ML.Trainers.FeatureFloatVectorCursor.Factory,System.Int32,Microsoft.ML.Data.VBuffer{System.Single}[],System.Int64@,System.Int64@)">
<summary>
Initialize starting centroids via reservoir sampling.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.KMeansLloydsYinYangTrain.WorkChunkStateBase.Reduce(Microsoft.ML.Trainers.KMeansLloydsYinYangTrain.WorkChunkState[],Microsoft.ML.Trainers.KMeansLloydsYinYangTrain.ReducedWorkChunkState)">
<summary>
Reduces the array of work chunks into this chunk, coalescing the
results from multiple worker threads partitioned over a parallel cursor set and
clearing their values to prepare them for the next iteration.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.KMeansLloydsYinYangTrain.ReducedWorkChunkState.UpdateClusters(Microsoft.ML.Data.VBuffer{System.Single}[],System.Single[],System.Single[],System.Single@)">
<summary>
Updates all the passed in variables with the results of the most recent iteration
of cluster assignment. It is assumed that centroids will contain the previous results
of this call.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.KMeansLloydsYinYangTrain.SharedState.SetYinYangCluster(System.Int32,Microsoft.ML.Data.VBuffer{System.Single}@,System.Single,System.Int32,System.Single)">
<summary>
When assigning an accelerated row to a cluster, we store away the distance
to its closest and second-closest cluster, as well as the identity of the new
closest cluster. This method returns the last known closest cluster.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.KMeansLloydsYinYangTrain.SharedState.UpdateYinYangBounds(System.Int32)">
<summary>
Updates the known YinYang bounds for the given row using the centroid position
deltas from the previous iteration.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.KMeansLloydsYinYangTrain.SharedState.IsYinYangGloballyBound(System.Int32)">
<summary>
Determines if the triangle distance inequality still applies to the given row,
allowing us to avoid per-cluster distance computation.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.KMeansLloydsYinYangTrain.ProcessChunk(Microsoft.ML.Trainers.FeatureFloatVectorCursor,Microsoft.ML.Trainers.KMeansLloydsYinYangTrain.SharedState,Microsoft.ML.Trainers.KMeansLloydsYinYangTrain.WorkChunkStateBase,System.Int32,Microsoft.ML.Data.VBuffer{System.Single}[],System.Single[])">
<summary>
Performs the 'update' step of KMeans. This method is passed a WorkChunkState. In the parallel version
this chunk will be one of _numThreads chunks and the RowCursor will be part of a RowCursorSet. In the
unthreaded version, this chunk will be the final chunk and hold state for the entire data set.
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.KMeansUtils.ParallelWeightedReservoirSample(Microsoft.ML.IHost,System.Int32,System.Int32,Microsoft.ML.Trainers.FeatureFloatVectorCursor.Factory,Microsoft.ML.Trainers.KMeansUtils.WeightFunc,Microsoft.ML.Trainers.KMeansUtils.RowIndexGetter,Microsoft.ML.Data.VBuffer{System.Single}[]@,Microsoft.ML.Internal.Utilities.Heap{Microsoft.ML.Trainers.KMeansUtils.WeightedPoint}[]@)">
<summary>
Performs a multithreaded version of weighted reservoir sampling, returning
an array of numSamples, where each sample has been selected from the
data set with a probability of numSamples/N * weight/(sum(weight)). Buffer
is sized to the number of threads plus one and stores the minheaps needed to
perform the per-thread reservoir samples.
This method assumes that the numSamples is much smaller than the full dataset as
it expects to be able to sample numSamples * numThreads.
This is based on the 'A-Res' algorithm in 'Weighted Random Sampling', 2005; Efraimidis, Spirakis:
https://utopia.duth.gr/~pefraimi/research/data/2007EncOfAlg.pdf
</summary>
</member>
<member name="M:Microsoft.ML.Trainers.KMeansUtils.ParallelMapReduce``2(System.Int32,Microsoft.ML.IHost,Microsoft.ML.Trainers.FeatureFloatVectorCursor.Factory,Microsoft.ML.Trainers.KMeansUtils.RowIndexGetter,Microsoft.ML.Trainers.KMeansUtils.InitAction{``0},Microsoft.ML.Trainers.KMeansUtils.MapAction{``0},Microsoft.ML.Trainers.KMeansUtils.ReduceAction{``0,``1},``0[]@,``1@)">
<summary>
Takes a data cursor and perform an in-memory parallel aggregation operation on it. This
helper wraps some of the behavior common to parallel operations over a IRowCursor set,
including building the set, creating separate Random instances, and IRowCursor disposal.
</summary>
<typeparam name="TPartitionState">The type that each parallel cursor will be expected to aggregate to.</typeparam>
<typeparam name="TGlobalState">The type of the final output from combining each per-thread instance of TInterAgg.</typeparam>
<param name="numThreads"></param>
<param name="baseHost"></param>
<param name="factory"></param>
<param name="rowIndexGetter"></param>
<param name="initChunk">Initializes an instance of TInterAgg, or prepares/clears it if it is already allocated.</param>
<param name="mapper">Invoked for every row, should update TInterAgg using row cursor data.</param>
<param name="reducer">Invoked after all row cursors have completed, combines the entire array of TInterAgg instances into a final TAgg result.</param>
<param name="buffer">A reusable buffer array of TInterAgg.</param>
<param name="result">A reusable reference to the final result.</param>
<returns></returns>
</member>
<member name="M:Microsoft.ML.Trainers.KMeansUtils.FindBestCluster(Microsoft.ML.Data.VBuffer{System.Single}@,Microsoft.ML.Data.VBuffer{System.Single}[],System.Single[],System.Int32,System.Boolean,System.Single@,System.Int32@,System.Single@,System.Int32@)">
<summary>
Given a point and a set of centroids this method will determine the closest centroid
using L2 distance. It will return a value equivalent to that distance, the index of the
closest cluster, and a value equivalent to the distance to the second-nearest cluster.
</summary>
<param name="features"></param>
<param name="centroids"></param>
<param name="centroidL2s">The L2 norms of the centroids. Used for efficiency and expected to be computed up front.</param>
<param name="centroidCount">The number of centroids. Must be less than or equal to the length of the centroid array.</param>
<param name="needRealDistance">Whether to return a real L2 distance, or a value missing the L2 norm of <paramref name="features"/>.</param>
<param name="minDistance">The distance between <paramref name="features"/> and the nearest centroid in <paramref name="centroids" />.</param>
<param name="cluster">The index of the nearest centroid.</param>
<param name="secMinDistance">The second nearest distance, or PosInf if <paramref name="centroids" /> only contains a single point.</param>
<param name="secCluster">The index of the second nearest centroid, or -1 if <paramref name="centroids" /> only contains a single point.</param>
</member>
<member name="M:Microsoft.ML.Trainers.KMeansUtils.VerifyModelConsistency(Microsoft.ML.Data.VBuffer{System.Single}[])">
<summary>
Checks that all coordinates of all centroids are finite, and throws otherwise
</summary>
</member>
</members>
</doc>
<?xml version="1.0"?>
<doc>
<assembly>
<name>Microsoft.ML.PCA</name>
</assembly>
<members>
<member name="M:Microsoft.ML.PcaCatalog.ProjectToPrincipalComponents(Microsoft.ML.TransformsCatalog.ProjectionTransforms,System.String,System.String,System.String,System.Int32,System.Int32,System.Boolean,System.Nullable{System.Int32})">
<summary>Initializes a new instance of <see cref="T:Microsoft.ML.Transforms.PrincipalComponentAnalysisEstimator"/>.</summary>
<param name="catalog">The transform's catalog.</param>
<param name="outputColumnName">Name of the column resulting from the transformation of <paramref name="inputColumnName"/>.</param>
<param name="inputColumnName">Name of column to transform. If set to <see langword="null"/>, the value of the <paramref name="outputColumnName"/> will be used as source.</param>
<param name="exampleWeightColumnName">The name of the example weight column (optional).</param>
<param name="rank">The number of principal components.</param>
<param name="overSampling">Oversampling parameter for randomized PrincipalComponentAnalysis training.</param>
<param name="center">If enabled, data is centered to be zero mean.</param>
<param name="seed">The seed for random number generation.</param>
</member>
<member name="M:Microsoft.ML.PcaCatalog.ProjectToPrincipalComponents(Microsoft.ML.TransformsCatalog.ProjectionTransforms,Microsoft.ML.Transforms.PrincipalComponentAnalysisEstimator.ColumnOptions[])">
<summary>Initializes a new instance of <see cref="T:Microsoft.ML.Transforms.PrincipalComponentAnalysisEstimator"/>.</summary>
<param name="catalog">The transform's catalog.</param>
<param name="columns">Input columns to apply PrincipalComponentAnalysis on.</param>
</member>
<member name="M:Microsoft.ML.PcaCatalog.RandomizedPca(Microsoft.ML.AnomalyDetectionCatalog.AnomalyDetectionTrainers,System.String,System.String,System.Int32,System.Int32,System.Boolean,System.Nullable{System.Int32})">
<summary>
Trains an approximate PCA using Randomized SVD algorithm.
</summary>
<param name="catalog">The anomaly detection catalog trainer object.</param>
<param name="featureColumnName">The name of the feature column.</param>
<param name="exampleWeightColumnName">The name of the example weight column (optional).</param>
<param name="rank">The number of components in the PCA.</param>
<param name="oversampling">Oversampling parameter for randomized PCA training.</param>
<param name="center">If enabled, data is centered to be zero mean.</param>
<param name="seed">The seed for random number generation.</param>
</member>
<member name="M:Microsoft.ML.PcaCatalog.RandomizedPca(Microsoft.ML.AnomalyDetectionCatalog.AnomalyDetectionTrainers,Microsoft.ML.Trainers.RandomizedPcaTrainer.Options)">
<summary>
Trains an approximate PCA using Randomized SVD algorithm.
</summary>
<param name="catalog">The anomaly detection catalog trainer object.</param>
<param name="options">Advanced options to the algorithm.</param>
</member>
<member name="T:Microsoft.ML.Trainers.RandomizedPcaTrainer">
<summary>
This trainer trains an approximate PCA using Randomized SVD algorithm
Reference: https://web.stanford.edu/group/mmds/slides2010/Martinsson.pdf
</summary>
<remarks>
This PCA can be made into Kernel PCA by using Random Fourier Features transform
</remarks>
</member>
<member name="M:Microsoft.ML.Trainers.RandomizedPcaTrainer.#ctor(Microsoft.ML.IHostEnvironment,System.String,System.String,System.Int32,System.Int32,System.Boolean,System.Nullable{System.Int32})">
<summary>
Initializes a new instance of <see cref="T:Microsoft.ML.Trainers.RandomizedPcaTrainer"/>.
</summary>
<param name="env">The local instance of the <see cref="T:Microsoft.ML.IHostEnvironment"/>.</param>
<param name="features">The name of the feature column.</param>
<param name="weights">The name of the weight column.</param>
<param name="rank">The number of components in the PCA.</param>
<param name="oversampling">Oversampling parameter for randomized PCA training.</param>
<param name="center">If enabled, data is centered to be zero mean.</param>
<param name="seed">The seed for random number generation.</param>
</member>
<member name="M:Microsoft.ML.Trainers.RandomizedPcaTrainer.PostProcess(System.Single[][],System.Single[],System.Single[],System.Int32,System.Int32)">
<summary>
Modifies <paramref name="y"/> in place so it becomes <paramref name="y"/> * eigenvectors / eigenvalues.
</summary>
</member>
<member name="T:Microsoft.ML.Trainers.PcaModelParameters">
<summary>
PCA is a dimensionality-reduction transform which computes the projection of the feature vector onto a low-rank subspace.
</summary><remarks>
<a href="https://en.wikipedia.org/wiki/Principal_component_analysis">Principal Component Analysis (PCA)</a> is a dimensionality-reduction algorithm which computes the projection of the feature vector onto a low-rank subspace.
Its training is done using the technique described in the paper: <a href="https://arxiv.org/pdf/1310.6304v2.pdf">Combining Structured and Unstructured Randomness in Large Scale PCA</a>,
and the paper <a href="https://arxiv.org/pdf/0909.4061v2.pdf">Finding Structure with Randomness: Probabilistic Algorithms for Constructing Approximate Matrix Decompositions</a>
<para>For more information, see also:</para>
<list type="bullet">
<item><description>
<a href="https://web.stanford.edu/group/mmds/slides2010/Martinsson.pdf">Randomized Methods for Computing the Singular Value Decomposition (SVD) of very large matrices</a>
</description></item>
<item><description>
<a href="https://arxiv.org/abs/0809.2274">A randomized algorithm for principal component analysis</a>
</description></item>
<item><description>
<a href="http://users.cms.caltech.edu/~jtropp/papers/HMT11-Finding-Structure-SIREV.pdf">Finding Structure with Randomness: Probabilistic Algorithms for Constructing Approximate Matrix Decompositions</a>
</description></item>
</list>
</remarks>
</member>
<member name="M:Microsoft.ML.Trainers.PcaModelParameters.#ctor(Microsoft.ML.IHostEnvironment,System.Int32,System.Single[][],Microsoft.ML.Data.VBuffer{System.Single}@)">
<summary>
Instantiate new model parameters from trained model.
</summary>
<param name="env">The host environment.</param>
<param name="rank">The rank of the PCA approximation of the covariance matrix. This is the number of eigenvectors in the model.</param>
<param name="eigenVectors">Array of eigenvectors.</param>
<param name="mean">The mean vector of the training data.</param>
</member>
<member name="M:Microsoft.ML.Trainers.PcaModelParameters.GetEigenVectors(Microsoft.ML.Data.VBuffer{System.Single}[]@,System.Int32@)">
<summary>
Copies the top eigenvectors of the covariance matrix of the training data
into a set of buffers.
</summary>
<param name="vectors">A possibly reusable set of vectors, which will
be expanded as necessary to accommodate the data.</param>
<param name="rank">Set to the rank, which is also the logical length
of <paramref name="vectors"/>.</param>
</member>
<member name="M:Microsoft.ML.Trainers.PcaModelParameters.GetMean(Microsoft.ML.Data.VBuffer{System.Single}@)">
<summary>
Copies the mean vector of the training data.
</summary>
</member>
<member name="T:Microsoft.ML.Transforms.PrincipalComponentAnalysisTransformer">
<summary>
PCA is a dimensionality-reduction transform which computes the projection of the feature vector onto a low-rank subspace.
</summary><remarks>
<a href="https://en.wikipedia.org/wiki/Principal_component_analysis">Principal Component Analysis (PCA)</a> is a dimensionality-reduction algorithm which computes the projection of the feature vector onto a low-rank subspace.
Its training is done using the technique described in the paper: <a href="https://arxiv.org/pdf/1310.6304v2.pdf">Combining Structured and Unstructured Randomness in Large Scale PCA</a>,
and the paper <a href="https://arxiv.org/pdf/0909.4061v2.pdf">Finding Structure with Randomness: Probabilistic Algorithms for Constructing Approximate Matrix Decompositions</a>
<para>For more information, see also:</para>
<list type="bullet">
<item><description>
<a href="https://web.stanford.edu/group/mmds/slides2010/Martinsson.pdf">Randomized Methods for Computing the Singular Value Decomposition (SVD) of very large matrices</a>
</description></item>
<item><description>
<a href="https://arxiv.org/abs/0809.2274">A randomized algorithm for principal component analysis</a>
</description></item>
<item><description>
<a href="http://users.cms.caltech.edu/~jtropp/papers/HMT11-Finding-Structure-SIREV.pdf">Finding Structure with Randomness: Probabilistic Algorithms for Constructing Approximate Matrix Decompositions</a>
</description></item>
</list>
</remarks>
</member>
<member name="T:Microsoft.ML.Transforms.PrincipalComponentAnalysisEstimator">
<summary>
PCA is a dimensionality-reduction transform which computes the projection of the feature vector onto a low-rank subspace.
</summary><remarks>
<a href="https://en.wikipedia.org/wiki/Principal_component_analysis">Principal Component Analysis (PCA)</a> is a dimensionality-reduction algorithm which computes the projection of the feature vector onto a low-rank subspace.
Its training is done using the technique described in the paper: <a href="https://arxiv.org/pdf/1310.6304v2.pdf">Combining Structured and Unstructured Randomness in Large Scale PCA</a>,
and the paper <a href="https://arxiv.org/pdf/0909.4061v2.pdf">Finding Structure with Randomness: Probabilistic Algorithms for Constructing Approximate Matrix Decompositions</a>
<para>For more information, see also:</para>
<list type="bullet">
<item><description>
<a href="https://web.stanford.edu/group/mmds/slides2010/Martinsson.pdf">Randomized Methods for Computing the Singular Value Decomposition (SVD) of very large matrices</a>
</description></item>
<item><description>
<a href="https://arxiv.org/abs/0809.2274">A randomized algorithm for principal component analysis</a>
</description></item>
<item><description>
<a href="http://users.cms.caltech.edu/~jtropp/papers/HMT11-Finding-Structure-SIREV.pdf">Finding Structure with Randomness: Probabilistic Algorithms for Constructing Approximate Matrix Decompositions</a>
</description></item>
</list>
</remarks>
</member>
<member name="T:Microsoft.ML.Transforms.PrincipalComponentAnalysisEstimator.ColumnOptions">
<summary>
Describes how the transformer handles one column pair.
</summary>
</member>
<member name="F:Microsoft.ML.Transforms.PrincipalComponentAnalysisEstimator.ColumnOptions.Name">
<summary>
Name of the column resulting from the transformation of <see cref="F:Microsoft.ML.Transforms.PrincipalComponentAnalysisEstimator.ColumnOptions.InputColumnName"/>.
</summary>
</member>
<member name="F:Microsoft.ML.Transforms.PrincipalComponentAnalysisEstimator.ColumnOptions.InputColumnName">
<summary>
Name of column to transform.
</summary>
</member>
<member name="F:Microsoft.ML.Transforms.PrincipalComponentAnalysisEstimator.ColumnOptions.WeightColumn">
<summary>
The name of the weight column.
</summary>
</member>
<member name="F:Microsoft.ML.Transforms.PrincipalComponentAnalysisEstimator.ColumnOptions.Rank">
<summary>
The number of components in the PCA.
</summary>
</member>
<member name="F:Microsoft.ML.Transforms.PrincipalComponentAnalysisEstimator.ColumnOptions.Oversampling">
<summary>
Oversampling parameter for randomized PCA training.
</summary>
</member>
<member name="F:Microsoft.ML.Transforms.PrincipalComponentAnalysisEstimator.ColumnOptions.Center">
<summary>
If enabled, data is centered to be zero mean.
</summary>
</member>
<member name="F:Microsoft.ML.Transforms.PrincipalComponentAnalysisEstimator.ColumnOptions.Seed">
<summary>
The seed for random number generation.
</summary>
</member>
<member name="M:Microsoft.ML.Transforms.PrincipalComponentAnalysisEstimator.ColumnOptions.#ctor(System.String,System.String,System.String,System.Int32,System.Int32,System.Boolean,System.Nullable{System.Int32})">
<summary>
Describes how the transformer handles one column pair.
</summary>
<param name="name">Name of the column resulting from the transformation of <paramref name="inputColumnName"/>.</param>
<param name="inputColumnName">Name of column to transform.
If set to <see langword="null"/>, the value of the <paramref name="name"/> will be used as source.</param>
<param name="weightColumn">The name of the weight column.</param>
<param name="rank">The number of components in the PCA.</param>
<param name="overSampling">Oversampling parameter for randomized PCA training.</param>
<param name="center">If enabled, data is centered to be zero mean.</param>
<param name="seed">The random seed. If unspecified random state will be instead derived from the <see cref="T:Microsoft.ML.MLContext"/>.</param>
</member>
<member name="M:Microsoft.ML.Transforms.PrincipalComponentAnalysisEstimator.#ctor(Microsoft.ML.IHostEnvironment,System.String,System.String,System.String,System.Int32,System.Int32,System.Boolean,System.Nullable{System.Int32})">
<summary>
PCA is a dimensionality-reduction transform which computes the projection of the feature vector onto a low-rank subspace.
</summary><remarks>
<a href="https://en.wikipedia.org/wiki/Principal_component_analysis">Principal Component Analysis (PCA)</a> is a dimensionality-reduction algorithm which computes the projection of the feature vector onto a low-rank subspace.
Its training is done using the technique described in the paper: <a href="https://arxiv.org/pdf/1310.6304v2.pdf">Combining Structured and Unstructured Randomness in Large Scale PCA</a>,
and the paper <a href="https://arxiv.org/pdf/0909.4061v2.pdf">Finding Structure with Randomness: Probabilistic Algorithms for Constructing Approximate Matrix Decompositions</a>
<para>For more information, see also:</para>
<list type="bullet">
<item><description>
<a href="https://web.stanford.edu/group/mmds/slides2010/Martinsson.pdf">Randomized Methods for Computing the Singular Value Decomposition (SVD) of very large matrices</a>
</description></item>
<item><description>
<a href="https://arxiv.org/abs/0809.2274">A randomized algorithm for principal component analysis</a>
</description></item>
<item><description>
<a href="http://users.cms.caltech.edu/~jtropp/papers/HMT11-Finding-Structure-SIREV.pdf">Finding Structure with Randomness: Probabilistic Algorithms for Constructing Approximate Matrix Decompositions</a>
</description></item>
</list>
</remarks>
<param name="env">The environment to use.</param>
<param name="outputColumnName">Name of the column resulting from the transformation of <paramref name="inputColumnName" />.</param>
<param name="inputColumnName">Name of the column to transform.
If set to <see langword="null" />, the value of the <paramref name="outputColumnName" /> will be used as source.</param>
<param name="weightColumn">The name of the weight column.</param>
<param name="rank">The number of components in the PCA.</param>
<param name="overSampling">Oversampling parameter for randomized PCA training.</param>
<param name="center">If enabled, data is centered to be zero mean.</param>
<param name="seed">The seed for random number generation.</param>
</member>
<member name="M:Microsoft.ML.Transforms.PrincipalComponentAnalysisEstimator.#ctor(Microsoft.ML.IHostEnvironment,Microsoft.ML.Transforms.PrincipalComponentAnalysisEstimator.ColumnOptions[])">
<summary>
PCA is a dimensionality-reduction transform which computes the projection of the feature vector onto a low-rank subspace.
</summary><remarks>
<a href="https://en.wikipedia.org/wiki/Principal_component_analysis">Principal Component Analysis (PCA)</a> is a dimensionality-reduction algorithm which computes the projection of the feature vector onto a low-rank subspace.
Its training is done using the technique described in the paper: <a href="https://arxiv.org/pdf/1310.6304v2.pdf">Combining Structured and Unstructured Randomness in Large Scale PCA</a>,
and the paper <a href="https://arxiv.org/pdf/0909.4061v2.pdf">Finding Structure with Randomness: Probabilistic Algorithms for Constructing Approximate Matrix Decompositions</a>
<para>For more information, see also:</para>
<list type="bullet">
<item><description>
<a href="https://web.stanford.edu/group/mmds/slides2010/Martinsson.pdf">Randomized Methods for Computing the Singular Value Decomposition (SVD) of very large matrices</a>
</description></item>
<item><description>
<a href="https://arxiv.org/abs/0809.2274">A randomized algorithm for principal component analysis</a>
</description></item>
<item><description>
<a href="http://users.cms.caltech.edu/~jtropp/papers/HMT11-Finding-Structure-SIREV.pdf">Finding Structure with Randomness: Probabilistic Algorithms for Constructing Approximate Matrix Decompositions</a>
</description></item>
</list>
</remarks>
<param name="env">The environment to use.</param>
<param name="columns">The dataset columns to use, and their specific settings.</param>
</member>
<member name="M:Microsoft.ML.Transforms.PrincipalComponentAnalysisEstimator.Fit(Microsoft.Data.DataView.IDataView)">
<summary>
Trains and returns a <see cref="T:Microsoft.ML.Transforms.PrincipalComponentAnalysisTransformer"/>.
</summary>
</member>
<member name="M:Microsoft.ML.Transforms.PrincipalComponentAnalysisEstimator.GetOutputSchema(Microsoft.ML.SchemaShape)">
<summary>
Returns the <see cref="T:Microsoft.ML.SchemaShape"/> of the schema which will be produced by the transformer.
Used for schema propagation and verification in a pipeline.
</summary>
</member>
</members>
</doc>
支持 Markdown 格式
你添加了 0 到此讨论。请谨慎行事。
Finish editing this message first!