
C++ Convolutional Neural Network

#include"TP_NNW.h"
#include<iostream>
#pragma warning(disable:4996)
using namespace std;
using namespace mnist;

float* SGD(Weight* W1, Weight& W5, Weight& Wo, float** X)
{
    Vector2 ve(28, 28);
    float* temp = (float*)malloc(sizeof(float) * 10);
    Vector2 Cout;
    float*** y1 = Conv(X, ve, Cout, W1, 20);            // convolution layer
    for (int i = 0; i < 20; i++)
        for (int n = 0; n < Cout.height; n++)
            for (int m = 0; m < Cout.width; m++)
                y1[i][n][m] = ReLU(y1[i][n][m]);        // ReLU activation
    Vector2 Cout2;
    float*** y3 = Pool(y1, Cout, 20, Cout2);            // mean pooling
    float* y4 = reshape(y3, Cout2, 20, true);           // flatten as input to the fully connected layer
    float* v5 = dot(W5, y4);                            // matrix-vector product
    float* y5 = ReLU(v5, W5);                           // ReLU activation
    float* v = dot(Wo, y5);                             // matrix-vector product
    float* y = Softmax(v, Wo);                          // Softmax classification
    for (int i = 0; i < Wo.len.height; i++)
        temp[i] = y[i];
    Free3(y1, 20, Cout.height);                         // release intermediate buffers
    free(y4);
    free(v5);
    free(y5);
    free(v);
    free(y);
    return temp;
}
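
The network in SGD() is fixed by the constants used throughout this post: a 28x28 input, twenty 9x9 kernels, "valid" convolution, 2x2 mean pooling, then W5 (100x2000) and Wo (10x100). As a quick sanity check of those shapes, here is a small standalone sketch; the names are illustrative and not part of the project:

#include <cassert>

// Forward-pass shapes implied by SGD():
// 28x28 input -> valid 9x9 conv -> 20x20 (x20 maps) -> 2x2 pool -> 10x10 -> flatten -> 2000 -> 100 -> 10
int main()
{
    const int inp = 28, kernel = 9, maps = 20;
    const int conv = inp - kernel + 1;      // 20
    const int pool = conv / 2;              // 10
    const int flat = pool * pool * maps;    // 2000, which matches W5(100, 2000)
    assert(conv == 20 && pool == 10 && flat == 2000);
    return 0;
}
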
void trainSGD(Weight* W1, Weight& W5, Weight& Wo, FILE* fp, FILE* tp)
{
    Vector2 ve(28, 28);
    unsigned char* reader = new unsigned char[ve.height * ve.width];
    float** X = apply2(ve.height, ve.width);
    unsigned char hao;
    hot_one<char> D(10);
    Weight* momentum1 = new Weight[20];                 // momentum terms
    Weight momentum5;
    Weight momentumo;
    Weight* dW1 = new Weight[20];                       // accumulated gradients
    Weight dW5;
    Weight dWo;
    for (int i = 0; i < 20; i++)
        W1[0] >> momentum1[i];
    W5 >> momentum5;
    Wo >> momentumo;
    int N = 8000;                                       // use the first 8000 training samples
    int bsize = 100;                                    // update the weights every 100 samples
    int b_len;
    int* blist = bList(bsize, N, &b_len);
    for (int batch = 0; batch < b_len; batch++)
    {
        for (int i = 0; i < 20; i++)
            W1[0] >> dW1[i];
        W5 >> dW5;
        Wo >> dWo;
        int begins = blist[batch];
        for (int k = begins; k < begins + bsize && k < N; k++)
        {
            ::fread(reader, sizeof(unsigned char), ve.height * ve.width, fp);   // read one image
            Toshape2(X, reader, ve);                    // reshape it into a 2-D array
            Vector2 Cout;                               // feature-map size after convolution (20x20)
            float*** y1 = Conv(X, ve, Cout, W1, 20);    // convolution layer
            for (int i = 0; i < 20; i++)
                for (int n = 0; n < Cout.height; n++)
                    for (int m = 0; m < Cout.width; m++)
                        y1[i][n][m] = ReLU(y1[i][n][m]);    // ReLU activation
            float*** y2 = y1;
            Vector2 Cout2;                              // size after pooling (10x10)
            float*** y3 = Pool(y1, Cout, 20, Cout2);    // pooling layer
            float* y4 = reshape(y3, Cout2, 20, true);   // flatten as input to the fully connected layer
            float* v5 = dot(W5, y4);                    // matrix-vector product
            float* y5 = ReLU(v5, W5);                   // ReLU activation
            float* v = dot(Wo, y5);                     // matrix-vector product
            float* y = Softmax(v, Wo);                  // Softmax classification
            ::fread(&hao, sizeof(unsigned char), 1, tp);    // read the label
            D.re(hao);
            float* e = new float[10];
            for (int i = 0; i < 10; i++)
                e[i] = ((float)D.one[i]) - y[i];        // output-layer error
            float* delta = e;
            float* e5 = FXCB_err(Wo, delta);            // back-propagate through Wo
            float* delta5 = Delta2(y5, e5, W5);
            float* e4 = FXCB_err(W5, delta5);           // back-propagate through W5
            float*** e3 = Toshape3(e4, 20, Cout2);
            float*** e2 = apply3(20, Cout.height, Cout.width);
            Weight one(2, 2, ones);
            /*for (int i = 0; i < 20; i++)
            {
                ::printf("layer %d\n", i);
                for (int n = 0; n < Cout2.height; n++)
                {
                    for (int m = 0; m < Cout2.width; m++)
                        ::printf("%0.3f ", e3[i][n][m]);
                    puts("");
                }
            }
            getchar();*/
            for (int i = 0; i < 20; i++)                // up-sample the error through the pooling layer
                kron(e2[i], Cout, e3[i], Cout2, one.WG, one.len);
            /*for (int i = 0; i < 20; i++)
            {
                ::printf("layer %d\n", i);
                for (int n = 0; n < Cout.height; n++)
                {
                    for (int m = 0; m < Cout.width; m++)
                        ::printf("%f ", e2[i][n][m]);
                    puts("");
                }
            }
            getchar();*/
            float*** delta2 = apply3(20, Cout.height, Cout.width);
            for (int i = 0; i < 20; i++)
                for (int n = 0; n < Cout.height; n++)
                    for (int m = 0; m < Cout.width; m++)
                        delta2[i][n][m] = (y2[i][n][m] > 0) * e2[i][n][m];  // ReLU derivative
            float*** delta_x = (float***)malloc(sizeof(float**) * 20);
            Vector2 t1;
            for (int i = 0; i < 20; i++)
                delta_x[i] = conv2(X, ve, delta2[i], Cout, &t1);            // kernel gradients
            for (int i = 0; i < 20; i++)
                for (int n = 0; n < t1.height; n++)
                    for (int m = 0; m < t1.width; m++)
                        dW1[i].WG[n][m] += delta_x[i][n][m];
            dW5.re(delta5, y4, 1);
            dWo.re(delta, y5, 1);
            Free3(delta_x, 20, t1.height);
            Free3(delta2, 20, Cout.height);
            one.release();
            Free3(e2, 20, Cout.height);
            Free3(e3, 20, Cout2.height);
            free(e4);
            free(delta5);
            free(e5);
            free(v5);
            delete[] e;
            free(y5);
            free(v);
            free(y);
            Free3(y1, 20, Cout.height);
            free(y4);
        }
        for (int i = 0; i < 20; i++)
            dW1[i] /= (bsize);
        dW5 /= (bsize);
        dWo /= (bsize);
        for (int i = 0; i < 20; i++)
            for (int n = 0; n < W1[0].len.height; n++)
                for (int m = 0; m < W1[0].len.width; m++)
                {
                    momentum1[i].WG[n][m] = ALPHA * dW1[i].WG[n][m] + BETA * momentum1[i].WG[n][m];
                    W1[i].WG[n][m] += momentum1[i].WG[n][m];
                }
        for (int n = 0; n < W5.len.height; n++)
            for (int m = 0; m < W5.len.width; m++)
                momentum5.WG[n][m] = ALPHA * dW5.WG[n][m] + BETA * momentum5.WG[n][m];
        W5 += momentum5;
        for (int n = 0; n < Wo.len.height; n++)
            for (int m = 0; m < Wo.len.width; m++)
                momentumo.WG[n][m] = ALPHA * dWo.WG[n][m] + BETA * momentumo.WG[n][m];
        Wo += momentumo;
    }
    for (int i = 0; i < 20; i++)
    {
        momentum1[i].release();
        dW1[i].release();
    }
    delete[] momentum1;
    delete[] dW1;
    momentum5.release();
    momentumo.release();
    Free2(X, ve.height);
    free(blist);
    delete[] reader;
    D.release();
    dW5.release();
    dWo.release();
    return;
}
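
The batch update at the end of trainSGD() is plain momentum SGD: the averaged error terms accumulated in dW are scaled by ALPHA (0.01), folded into a running momentum buffer decayed by BETA (0.95), and the momentum is then added to the weights. A minimal sketch of that rule on a flat array (the function name and signature here are illustrative only):

// Momentum update as used in trainSGD():
//   momentum = ALPHA * dW + BETA * momentum;  W += momentum;
void momentum_step(float* W, const float* dW, float* momentum, int n,
                   float alpha = 0.01f, float beta = 0.95f)
{
    for (int i = 0; i < n; i++)
    {
        momentum[i] = alpha * dW[i] + beta * momentum[i];
        W[i] += momentum[i];
    }
}
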
void trainSGD1(Weight* W1, Weight& W5, Weight& Wo, FILE* fp, FILE* tp)
{
    Vector2 ve(28, 28);
    unsigned char* reader = new unsigned char[ve.height * ve.width];
    float** X = apply2(ve.height, ve.width);
    unsigned char hao;
    hot_one<char> D(10);
    Weight* momentum1 = new Weight[20];                 // momentum terms
    Weight momentum5;
    Weight momentumo;
    Weight* dW1 = new Weight[20];                       // accumulated gradients
    Weight dW5;
    Weight dWo;
    for (int i = 0; i < 20; i++)
        W1[0] >> momentum1[i];
    W5 >> momentum5;
    Wo >> momentumo;
    int N = 108;                                        // only the first 108 samples of this small set
    int bsize = 12;                                     // update the weights every 12 samples
    int b_len;
    int* blist = bList(bsize, N, &b_len);
    for (int batch = 0; batch < b_len; batch++)
    {
        for (int i = 0; i < 20; i++)
            W1[0] >> dW1[i];
        W5 >> dW5;
        Wo >> dWo;
        int begins = blist[batch];
        for (int k = begins; k < begins + bsize && k < N; k++)
        {
            ::fread(reader, sizeof(unsigned char), ve.height * ve.width, fp);   // read one image
            Toshape2(X, reader, ve);                    // reshape it into a 2-D array
            Vector2 Cout;                               // feature-map size after convolution (20x20)
            float*** y1 = Conv(X, ve, Cout, W1, 20);    // convolution layer
            for (int i = 0; i < 20; i++)
                for (int n = 0; n < Cout.height; n++)
                    for (int m = 0; m < Cout.width; m++)
                        y1[i][n][m] = ReLU(y1[i][n][m]);    // ReLU activation
            float*** y2 = y1;
            Vector2 Cout2;                              // size after pooling (10x10)
            float*** y3 = Pool(y1, Cout, 20, Cout2);    // pooling layer
            float* y4 = reshape(y3, Cout2, 20, true);   // flatten as input to the fully connected layer
            float* v5 = dot(W5, y4);                    // matrix-vector product
            float* y5 = ReLU(v5, W5);                   // ReLU activation
            float* v = dot(Wo, y5);                     // matrix-vector product
            float* y = Softmax(v, Wo);                  // Softmax classification
            ::fread(&hao, sizeof(unsigned char), 1, tp);    // read the label
            D.re(hao);
            float* e = new float[10];
            for (int i = 0; i < 10; i++)
                e[i] = ((float)D.one[i]) - y[i];        // output-layer error
            float* delta = e;
            float* e5 = FXCB_err(Wo, delta);            // back-propagate through Wo
            float* delta5 = Delta2(y5, e5, W5);
            float* e4 = FXCB_err(W5, delta5);           // back-propagate through W5
            float*** e3 = Toshape3(e4, 20, Cout2);
            float*** e2 = apply3(20, Cout.height, Cout.width);
            Weight one(2, 2, ones);
            /*for (int i = 0; i < 20; i++)
            {
                ::printf("layer %d\n", i);
                for (int n = 0; n < Cout2.height; n++)
                {
                    for (int m = 0; m < Cout2.width; m++)
                        ::printf("%0.3f ", e3[i][n][m]);
                    puts("");
                }
            }
            getchar();*/
            for (int i = 0; i < 20; i++)                // up-sample the error through the pooling layer
                kron(e2[i], Cout, e3[i], Cout2, one.WG, one.len);
            /*for (int i = 0; i < 20; i++)
            {
                ::printf("layer %d\n", i);
                for (int n = 0; n < Cout.height; n++)
                {
                    for (int m = 0; m < Cout.width; m++)
                        ::printf("%f ", e2[i][n][m]);
                    puts("");
                }
            }
            getchar();*/
            float*** delta2 = apply3(20, Cout.height, Cout.width);
            for (int i = 0; i < 20; i++)
                for (int n = 0; n < Cout.height; n++)
                    for (int m = 0; m < Cout.width; m++)
                        delta2[i][n][m] = (y2[i][n][m] > 0) * e2[i][n][m];  // ReLU derivative
            float*** delta_x = (float***)malloc(sizeof(float**) * 20);
            Vector2 t1;
            for (int i = 0; i < 20; i++)
                delta_x[i] = conv2(X, ve, delta2[i], Cout, &t1);            // kernel gradients
            for (int i = 0; i < 20; i++)
                for (int n = 0; n < t1.height; n++)
                    for (int m = 0; m < t1.width; m++)
                        dW1[i].WG[n][m] += delta_x[i][n][m];
            dW5.re(delta5, y4, 1);
            dWo.re(delta, y5, 1);
            Free3(delta_x, 20, t1.height);
            Free3(delta2, 20, Cout.height);
            one.release();
            Free3(e2, 20, Cout.height);
            Free3(e3, 20, Cout2.height);
            free(e4);
            free(delta5);
            free(e5);
            free(v5);
            delete[] e;
            free(y5);
            free(v);
            free(y);
            Free3(y1, 20, Cout.height);
            free(y4);
        }
        for (int i = 0; i < 20; i++)
            dW1[i] /= (bsize);
        dW5 /= (bsize);
        dWo /= (bsize);
        for (int i = 0; i < 20; i++)
            for (int n = 0; n < W1[0].len.height; n++)
                for (int m = 0; m < W1[0].len.width; m++)
                {
                    momentum1[i].WG[n][m] = ALPHA * dW1[i].WG[n][m] + BETA * momentum1[i].WG[n][m];
                    W1[i].WG[n][m] += momentum1[i].WG[n][m];
                }
        for (int n = 0; n < W5.len.height; n++)
            for (int m = 0; m < W5.len.width; m++)
                momentum5.WG[n][m] = ALPHA * dW5.WG[n][m] + BETA * momentum5.WG[n][m];
        W5 += momentum5;
        for (int n = 0; n < Wo.len.height; n++)
            for (int m = 0; m < Wo.len.width; m++)
                momentumo.WG[n][m] = ALPHA * dWo.WG[n][m] + BETA * momentumo.WG[n][m];
        Wo += momentumo;
    }
    for (int i = 0; i < 20; i++)
    {
        momentum1[i].release();
        dW1[i].release();
    }
    delete[] momentum1;
    delete[] dW1;
    momentum5.release();
    momentumo.release();
    Free2(X, ve.height);
    free(blist);
    delete[] reader;
    D.release();
    dW5.release();
    dWo.release();
    return;
}
float rand1()
{
    float temp = (rand() % 20) / (float)10;
    if (temp < 0.0001)
        temp = 0.07;
    temp *= (rand() % 2 == 0) ? -1 : 1;
    return temp * 0.01;
}
float rand2()
{
    float temp = (rand() % 10) / (float)10;
    float ret = (2 * temp - 1) * sqrt(6) / sqrt(360 + 2000);
    if (ret < 0.0001 && ret > -0.0001)
        ret = 0.07;
    return ret;
}
float rand3()
{
    float temp = (rand() % 10) / (float)10;
    float ret = (2 * temp - 1) * sqrt(6) / sqrt(10 + 100);
    if (ret < 0.0001 && ret > -0.0001)
        ret = 0.07;
    return ret;
}
void train()
{
    FILE* fp = fopen("t10k-images.idx3-ubyte", "rb");
    FILE* tp = fopen("t10k-labels.idx1-ubyte", "rb");
    int rdint;
    ::fread(&rdint, sizeof(int), 1, fp);
    ::printf("image magic number: %d\n", ReverseInt(rdint));
    ::fread(&rdint, sizeof(int), 1, fp);
    ::printf("image count: %d\n", ReverseInt(rdint));
    ::fread(&rdint, sizeof(int), 1, fp);
    ::printf("image height: %d\n", ReverseInt(rdint));
    ::fread(&rdint, sizeof(int), 1, fp);
    ::printf("image width: %d\n", ReverseInt(rdint));
    int start1 = ftell(fp);
    ::fread(&rdint, sizeof(int), 1, tp);
    ::printf("label magic number: %d\n", ReverseInt(rdint));
    ::fread(&rdint, sizeof(int), 1, tp);
    ::printf("label count: %d\n", ReverseInt(rdint));
    int start2 = ftell(tp);
    Weight* W1 = new Weight[20];
    WD(W1, 9, 9, 20, rand1);
    Weight W5(100, 2000, rand2);
    Weight Wo(10, W5.len.height, rand3);
    for (int k = 0; k < 3; k++)
    {
        trainSGD(W1, W5, Wo, fp, tp);
        fseek(fp, start1, 0);
        fseek(tp, start2, 0);
        ::printf("epoch %d finished\n", k + 1);
    }
    fclose(fp);
    fclose(tp);
    fp = fopen("mnist_Weight.acp", "wb");
    for (int i = 0; i < 20; i++)
        W1[i].save(fp);
    W5.save(fp);
    Wo.save(fp);
    fclose(fp);
    ::printf("training finished");
    getchar();
}
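
Since Weight::save() writes nothing but the raw floats, the size of the mnist_Weight.acp file produced by train() follows directly from the layer shapes: twenty 9x9 kernels, W5 (100x2000) and Wo (10x100). A small sketch that computes the expected file size, assuming 4-byte floats:

#include <cstdio>

// Expected size of mnist_Weight.acp: 20*9*9 + 100*2000 + 10*100 = 202620 floats.
int main()
{
    const long floats = 20L * 9 * 9 + 100L * 2000 + 10L * 100;
    std::printf("%ld floats, %ld bytes\n", floats, floats * (long)sizeof(float));
    return 0;
}
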
void train1()
{FILE* fp = fopen("out_img.acp", "rb");FILE* tp = fopen("out_label.acp", "rb");int rdint;::fread(&rdint, sizeof(int), 1, fp);::printf("训练集幻数:%d\n", ReverseInt(rdint));::fread(&rdint, sizeof(int), 1, fp);::printf("训练集数量:%d\n", ReverseInt(rdint));::fread(&rdint, sizeof(int), 1, fp);::printf("训练集高度:%d\n", ReverseInt(rdint));::fread(&rdint, sizeof(int), 1, fp);::printf("训练集宽度:%d\n", ReverseInt(rdint));int start1 = ftell(fp);::fread(&rdint, sizeof(int), 1, tp);::printf("标签幻数:%d\n", ReverseInt(rdint));::fread(&rdint, sizeof(int), 1, tp);::printf("标签数量:%d\n", ReverseInt(rdint));int start2 = ftell(tp);Weight* W1 = new Weight[20];WD(W1, 9, 9, 20, rand1);Weight W5(100, 2000, rand2);Weight Wo(10, W5.len.height, rand3);for (int k = 0; k < 1000; k++){trainSGD1(W1, W5, Wo, fp, tp);fseek(fp, start1, 0);fseek(tp, start2, 0);::printf("第%d次训练结束\n", k + 1);}fclose(fp);fclose(tp);fp = fopen("mnist_Weight.acp", "wb");for (int i = 0; i < 20; i++)W1[i].save(fp);W5.save(fp);Wo.save(fp);fclose(fp);::printf("训练完成");getchar();
}
void test()
{FILE* fp = fopen("mnist_Weight.acp", "rb");Weight* W1 = new Weight[20];WD(W1, 9, 9, 20, rand1);Weight W5(100, 2000, rand1);Weight Wo(10, W5.len.height, rand1);for (int i = 0; i < 20; i++)W1[i].load(fp);W5.load(fp);Wo.load(fp);fclose(fp);fp = fopen("t10k-images.idx3-ubyte", "rb");FILE* tp = fopen("t10k-labels.idx1-ubyte", "rb");int rdint;::fread(&rdint, sizeof(int), 1, fp);::printf("训练集幻数:%d\n", ReverseInt(rdint));::fread(&rdint, sizeof(int), 1, fp);::printf("训练集数量:%d\n", ReverseInt(rdint));::fread(&rdint, sizeof(int), 1, fp);::printf("训练集高度:%d\n", ReverseInt(rdint));::fread(&rdint, sizeof(int), 1, fp);::printf("训练集宽度:%d\n", ReverseInt(rdint));::fread(&rdint, sizeof(int), 1, tp);::printf("标签幻数:%d\n", ReverseInt(rdint));::fread(&rdint, sizeof(int), 1, tp);::printf("标签数量:%d\n", ReverseInt(rdint));unsigned char* res = new unsigned char[28 * 28];float** X = apply2(28, 28);unsigned char biaoqian;Vector2 t2828 = Vector2(28, 28);for (int i = 0; i < 50; i++){::fread(res, sizeof(unsigned char), 28 * 28, fp);Toshape2(X, res, 28, 28);print(X, t2828);float* h = SGD(W1, W5, Wo, X);//带入神经网络int c = -1;for (int i = 0; i < 10; i++){if (h[i] > 0.85){c = i;break;}}::fread(&biaoqian, sizeof(unsigned char), 1, tp);::printf("正确结果应当为“%d”,      神经网络识别为“%d”   \n", biaoqian, c);}
}
void sb()
{
    Weight* W1;                                 // note: no weights are allocated or loaded in this overload
    Weight W5(100, 2000, rand2);
    Weight Wo(10, W5.len.height, rand3);
    //::printf("weights loaded\n");
    Vector2 out;
    char path[256];
    for (int r = 0; r < 4; r++)
    {
        sprintf(path, "acp%d.png", r);
        float** img = Get_data_by_Mat(path, out);
        //print(img, out);
        float* h = SGD(W1, W5, Wo, img);        // run the forward pass
        int c = -1;
        float x = 0;
        for (int i = 0; i < 10; i++)
        {
            if (h[i] > 0.85 && h[i] > x)
            {
                x = h[i];
                c = i;
            }
        }
        ::printf("%d ", c);
        Free2(img, out.height);
        free(h);
        remove(path);
    }
    puts("");
}
void sb(char* path)
{
    Weight* W1 = new Weight[20];
    Weight W5(100, 2000, rand2);
    Weight Wo(10, W5.len.height, rand3);
    FILE* fp = fopen("mnist_Weight.acp", "rb");
    puts("loading weights");
    WD(W1, 9, 9, 20, rand1);
    for (int i = 0; i < 20; i++)
        W1[i].load(fp);
    W5.load(fp);
    Wo.load(fp);
    fclose(fp);
    ::printf("weights loaded\n");
    Vector2 out;
    float** img = Get_data_by_Mat(path, out);
    printf("image loaded");
    //print(img, out);
    float* h = SGD(W1, W5, Wo, img);            // run the forward pass
    int c = -1;
    float max = -1;
    for (int i = 0; i < 10; i++)
    {
        ::printf("%f\n", h[i]);
        /*if (h[i] > 0.65 && h[i] > x){x = h[i];c = i;}*/
        if (max < h[i])
        {
            max = h[i];
            c = i;
        }
    }
    ::printf("the network thinks this is the digit --> %d   confidence: %f", c, max);
    Free2(img, out.height);
    free(h);
}
bool thank(int x1,int x2, int y1, int y2, int z1, int z2 )
{
    int dis = 0;
    int xx = (x1 - x2);
    dis += xx * xx;
    xx = (y1 - y2);
    dis += xx * xx;
    xx = (z1 - z2);
    dis += xx * xx;
    dis = (int)sqrt(dis);
    if (dis < 100)
        return true;
    return false;
}
void qg(char* path)
{::printf(path);::printf("识别为:");//Mat img = imread(path);CImage img;img.Load(path);//Vec3b yes = Vec3b(204, 198, 204);CImage sav;// = Mat(120, 80, CV_8UC3);sav.Create(120, 80, 24);ResizeCImage(img, img.GetWidth() * 10, img.GetHeight() * 10);int XS = img.GetBPP() / 8;int pitch = img.GetPitch();//resize(img, img, Size(img.cols * 10, img.rows * 10));unsigned char* rgb = (unsigned char*)img.GetBits();for (int i = 0; i < img.GetHeight(); i++)for (int j = 0; j < img.GetWidth(); j++){//Vec3b rgb = img.at<Vec3b>(i, j);int x1= *(rgb + (j * XS) + (i * pitch) + 0);int y1 = *(rgb + (j * XS) + (i * pitch) + 1);int z1 = *(rgb + (j * XS) + (i * pitch) + 2);if (thank(x1, 204, y1, 198, z1, 204)){*(rgb + (j * XS) + (i * pitch) + 0) = 255;*(rgb + (j * XS) + (i * pitch) + 1) = 255;*(rgb + (j * XS) + (i * pitch) + 2) = 255;//img.at<Vec3b>(i, j) = Vec3b(255, 255, 255);}/*elseimg.at<Vec3b>(i, j) = Vec3b(0, 0, 0);*/}/*char p[256];for (int k = 0; k < 4; k++){sprintf(p, "acp%d.png", k);for (int i = 35 + (k * 80); i < 115 + (k * 80); i++)for (int j = 30; j < 150; j++)sav.at<Vec3b>(j - 30, i - (35 + (k * 80))) = img.at<Vec3b>(j, i);imwrite(p, sav);}img.release();sav.release();*/sb();
}
void test1()
{FILE* fp = fopen("mnist_Weight.acp", "rb");Weight* W1 = new Weight[20];WD(W1, 9, 9, 20, rand1);Weight W5(100, 2000, rand1);Weight Wo(10, W5.len.height, rand1);for (int i = 0; i < 20; i++)W1[i].load(fp);W5.load(fp);Wo.load(fp);fclose(fp);fp = fopen("out_img.acp", "rb");FILE* tp = fopen("out_label.acp", "rb");int rdint;::fread(&rdint, sizeof(int), 1, fp);::printf("训练集幻数:%d\n", ReverseInt(rdint));::fread(&rdint, sizeof(int), 1, fp);::printf("训练集数量:%d\n", ReverseInt(rdint));::fread(&rdint, sizeof(int), 1, fp);::printf("训练集高度:%d\n", ReverseInt(rdint));::fread(&rdint, sizeof(int), 1, fp);::printf("训练集宽度:%d\n", ReverseInt(rdint));::fread(&rdint, sizeof(int), 1, tp);::printf("标签幻数:%d\n", ReverseInt(rdint));::fread(&rdint, sizeof(int), 1, tp);::printf("标签数量:%d\n", ReverseInt(rdint));unsigned char* res = new unsigned char[28 * 28];float** X = apply2(28, 28);unsigned char biaoqian;Vector2 t2828 = Vector2(28, 28);for (int i = 0; i < 10; i++){::fread(res, sizeof(unsigned char), 28 * 28, fp);Toshape2(X, res, 28, 28);print(X, t2828);float* h = SGD(W1, W5, Wo, X);//带入神经网络int c = -1;for (int i = 0; i < 10; i++){if (h[i] > 0.85){c = i;break;}}::fread(&biaoqian, sizeof(unsigned char), 1, tp);::printf("正确结果应当为“%d”,      神经网络识别为“%d”   \n", biaoqian, c);}
}
int main(int argc, char** argv)
{
    //train();      // call train() first; once training has finished, the saved weights can be loaded directly
    if (argc > 1)
    {
        sb(argv[1]);
        getchar();
    }
    return 0;
}
#include"TP_NNW.h"
#include<iostream>
#pragma warning(disable:4996)
void Weight::apply(int H, int W)
{
    fz = true;
    this->len.height = H;
    this->len.width = W;
    this->WG = apply2(H, W);                    // allocate the matrix
    for (int i = 0; i < H; i++)
        for (int j = 0; j < W; j++)
            this->WG[i][j] = Get_rand();        // fill with random values
}
void Weight::apply(int H, int W, float(*def)())
{
    fz = true;
    this->len.height = H;
    this->len.width = W;
    this->WG = apply2(H, W);
    for (int i = 0; i < H; i++)
        for (int j = 0; j < W; j++)
            this->WG[i][j] = def();
}
Weight::~Weight()
{
    this->release();
}
Weight::Weight(int H/*height*/, int W/*width*/)
{
    W = W <= 0 ? 1 : W;                         // guard against zero or negative sizes
    H = H <= 0 ? 1 : H;
    fz = true;
    this->apply(H, W);
}
Weight::Weight(int H/*height*/, int W/*width*/, float(*def)())
{
    W = W <= 0 ? 1 : W;
    H = H <= 0 ? 1 : H;
    fz = true;
    this->apply(H, W, def);
}
void Weight::re(float* delta, float* inp, float alpha)
{
    for (int i = 0; i < this->len.height; i++)
    {
        for (int j = 0; j < this->len.width; j++)
            this->WG[i][j] += alpha * delta[i] * inp[j];
    }
}
void Weight::save(FILE* fp)
{
    for (int i = 0; i < this->len.height; i++)
        for (int j = 0; j < this->len.width; j++)
            fwrite(&this->WG[i][j], sizeof(float), 1, fp);
}
void Weight::load(FILE* fp)
{
    for (int i = 0; i < this->len.height; i++)
        for (int j = 0; j < this->len.width; j++)
            fread(&this->WG[i][j], sizeof(float), 1, fp);
}
void Weight::release()
{
    if (this->fz)
    {
        Free2(this->WG, this->len.height);
        //free(this->WG);
    }
    this->fz = false;
}
void Weight::operator >> (Weight& temp)
{temp.release();//free(temp.WG);temp.apply(this->len.height, this->len.width, zeros);
}
void Weight::operator+=(Weight& temp)
{for (int i = 0; i < this->len.height; i++)for (int j = 0; j < this->len.width; j++)this->WG[i][j] += temp.WG[i][j];
}//void Weight::operator/=(int & temp)
//{
//	for (int i = 0; i < this->len.height; i++)
//		for (int j = 0; j < this->len.width; j++)
//			this->WG[i][j] /= temp;
//}void Weight::operator/=(int temp)
{for (int i = 0; i < this->len.height; i++)for (int j = 0; j < this->len.width; j++)this->WG[i][j] /= temp;
}void Weight::operator<<(Weight& temp)
{Free2(this->WG, this->len.height);this->len.height = temp.len.height;this->len.width = temp.len.width;this->WG = temp.WG;
}void WD(Weight* WGS, int H, int W, int len)
{for (int i = 0; i < len; i++){WGS[i].apply(H, W);}
}
void WD(Weight* WGS, int H, int W, int len, float(*def)())
{for (int i = 0; i < len; i++){WGS[i].apply(H, W, def);}
}
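
For readers following the training code above, this is the pattern the Weight operators are meant for: operator>> clones the left operand's shape into the right operand as a zero matrix, re() adds delta * inp^T, operator/= averages over the batch and operator+= applies the update. A short usage sketch on a tiny layer (it only assumes the declarations in TP_NNW.h):

#include "TP_NNW.h"

// Sketch of the per-batch pattern used in trainSGD(), on a tiny 2x3 layer.
void weight_ops_demo()
{
    Weight W(2, 3, zeros);
    Weight dW;
    W >> dW;                                // dW becomes a 2x3 zero matrix
    float delta[2] = { 1.0f, -1.0f };
    float inp[3] = { 0.5f, 0.0f, 0.25f };
    dW.re(delta, inp, 1.0f);                // dW += delta * inp^T
    dW /= 2;                                // average over a "batch" of two samples
    W += dW;                                // apply the update
}
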
float zeros()
{return 0;
}void print(float* y, int y_len)
{for (int i = 0; i < y_len; i++){printf("%0.2f ", y[i]);//printf("%d ", y[i]>0?1:0);}puts("");
}void print(float* y, Vector2& vec)
{print(y, vec.height);
}void print(float** y, Vector2& vec)
{for (int i = 0; i < vec.height; i++)print(y[i], vec.width);
}void print(char* y, int y_len)
{for (int i = 0; i < y_len; i++){printf("%d ", y[i]);}puts("");
}void print(char** y, Vector2& vec)
{for (int i = 0; i < vec.height; i++)print(y[i], vec.width);
}void print(Weight& w)
{print(w.WG, w.len);
}void print(Weight* w, int len)
{for (int i = 0; i < len; i++){printf("\n第%d层\n", i + 1);print(w[i]);}
}
float** apply2(int H, int W)
{
    float** temp = (float**)malloc(sizeof(float*) * H);
    for (int i = 0; i < H; i++)
        temp[i] = (float*)malloc(sizeof(float) * W);
    return temp;
}
float*** apply3(int P, int H/*height*/, int W/*width*/)
{
    float*** temp = (float***)malloc(sizeof(float**) * P);
    for (int i = 0; i < P; i++)
        temp[i] = apply2(H, W);
    return temp;
}
char** apply2_char(int H, int W)
{
    char** temp = (char**)malloc(sizeof(char*) * H);
    for (int i = 0; i < H; i++)
        temp[i] = (char*)malloc(sizeof(char) * W);
    return temp;
}
float ones()
{return 1;
}
float*** Conv(float** X, Vector2& inp, Vector2& out, Weight* W, int W_len)
{
    out.height = inp.height - W[0].len.height + 1;      // "valid" convolution output size
    out.width = inp.width - W[0].len.width + 1;
    float*** temp = (float***)malloc(sizeof(float**) * W_len);
    for (int k = 0; k < W_len; k++)
        temp[k] = conv2(X, inp, W[k].WG, W[0].len);
    return temp;
}
float*** Pool(float*** y, Vector2& inp, int P, Vector2& out)
{
    int h = inp.height / 2, w = inp.width / 2;
    out.height = h;
    out.width = w;
    float*** temp = apply3(P, h, w);
    float** filter = apply2(2, 2);
    for (int i = 0; i < 2; i++)
        for (int j = 0; j < 2; j++)
            filter[i][j] = 0.25;                        // 2x2 mean filter
    for (int k = 0; k < P; k++)
    {
        Vector2 len;
        Vector2 t22 = Vector2(2, 2);
        float** img = conv2(y[k], inp, filter, t22, &len);
        for (int i = 0; i < h; i++)
            for (int j = 0; j < w; j++)
                temp[k][i][j] = img[i * 2][j * 2];      // keep every second element (stride 2)
        Free2(img, len.height);
    }
    Free2(filter, 2);
    return temp;
}
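
Pool() above implements 2x2 mean pooling indirectly: each feature map is convolved with a 2x2 filter of constant 0.25 and every second row and column of the result is kept. The sketch below does the same computation directly on one map, which may be easier to read (the function is illustrative and not part of the library; the caller is assumed to have allocated out with h/2 rows and w/2 columns):

// Direct 2x2 mean pooling, equivalent to Pool()'s
// "convolve with a 0.25 filter, then keep every second element".
void mean_pool_2x2(float** in, int h, int w, float** out)
{
    for (int i = 0; i + 1 < h; i += 2)
        for (int j = 0; j + 1 < w; j += 2)
            out[i / 2][j / 2] = 0.25f * (in[i][j] + in[i][j + 1]
                                       + in[i + 1][j] + in[i + 1][j + 1]);
}
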
float* apply1(int H)
{float* temp = (float*)malloc(sizeof(float*) * H);return temp;
}char* apply1_char(int H)
{char* temp = (char*)malloc(sizeof(char*) * H);return temp;
}float Get_rand()
{float temp = (float)(rand() % 10) / (float)10;return rand() % 2 == 0 ? temp : -temp;
}float Sigmoid(float x)
{return 1 / (1 + exp(-x));
}float* Sigmoid(float* x, Weight& w)
{return Sigmoid(x, w.len.height);
}float* Sigmoid(float* x, int height)
{float* y = (float*)malloc(sizeof(float*) * height);for (int i = 0; i < height; i++)y[i] = Sigmoid(x[i]);return y;
}float ReLU(float x)
{return x > 0 ? x : 0;
}float* ReLU(float* x, Weight& w)
{return ReLU(x, w.len.height);
}float* ReLU(float* x, int height)
{float* y = (float*)malloc(sizeof(float*) * height);for (int i = 0; i < height; i++)y[i] = ReLU(x[i]);return y;
}float* Softmax(float* x, Weight& w)
{return Softmax(x, w.len.height);
}float dsigmoid(float x)
{return x * (1 - x);
}
float* Softmax(float* x, int height)
{
    float* t = (float*)malloc(sizeof(float) * height);
    float* ex = (float*)malloc(sizeof(float) * height);
    float sum = 0;
    for (int i = 0; i < height; i++)
    {
        ex[i] = exp(x[i]);
        sum += ex[i];
    }
    for (int i = 0; i < height; i++)
    {
        t[i] = ex[i] / sum;
    }
    free(ex);
    return t;
}float* FXCB_err(Weight& w, float* delta)
{float* temp = (float*)malloc(sizeof(float*) * w.len.width);for (int i = 0; i < w.len.width; i++)temp[i] = 0;for (int i = 0; i < w.len.width; i++)for (int j = 0; j < w.len.height; j++)temp[i] += w.WG[j][i] * delta[j];return temp;
}float* Delta1(float* y, float* e, Weight& w)
{float* temp = (float*)malloc(sizeof(float*) * w.len.height);for (int i = 0; i < w.len.height; i++)temp[i] = y[i] * (1 - y[i]) * e[i];return temp;
}float* Delta2(float* v, float* e, Weight& w)
{float* temp = (float*)malloc(sizeof(float*) * w.len.height);for (int i = 0; i < w.len.height; i++)temp[i] = v[i] > 0 ? e[i] : 0;return temp;
}float* dot(Weight& W, float* inp, int* len)
{float* temp = (float*)malloc(sizeof(float*) * W.len.height);for (int i = 0; i < W.len.height; i++)temp[i] = 0;for (int i = 0; i < W.len.height; i++){for (int j = 0; j < W.len.width; j++)temp[i] += (W.WG[i][j] * inp[j]);}if (len != NULL)*len = W.len.height;return temp;
}
char* randperm(int max, int count)
{
    char* temp = new char[count] {0};
    for (int i = 0; i < count; i++)
    {
        while (1)
        {
            char t = rand() % max;
            bool nothave = true;
            for (int j = 0; j < i; j++)
                if (t == temp[j])
                {
                    nothave = false;
                    break;
                }
            if (nothave)
            {
                temp[i] = t;
                break;
            }
        }
    }
    return temp;
}
void Dropout(float* y, float ratio, Weight& w)
{
    float* ym = new float[w.len.height] {0};
    float round = w.len.height * (1 - ratio);
    int num = (round - (float)(int)round >= 0.5f ? (int)round + 1 : (int)round);
    char* idx = randperm(w.len.height, num);
    for (int i = 0; i < num; i++)
    {
        ym[idx[i]] = (1 / (1 - ratio));             // inverted dropout scaling
    }
    for (int i = 0; i < w.len.height; i++)
    {
        y[i] *= ym[i];
    }
    delete[] idx;
    delete[] ym;
}float** conv2(float** x, Vector2& x_len, float** fiter, Vector2& fiter_len, Vector2* out_len, int flag, int distance, int fill)
{switch (flag){case Valid:return VALID(x, x_len.height, x_len.width, fiter, fiter_len.height, fiter_len.width, distance, out_len);case Same:return SAME(x, x_len.height, x_len.width, fiter, fiter_len.height, fiter_len.width, distance, fill, out_len);}return nullptr;
}float** VALID(float** x, int x_h, int x_w, float** fiter, int fiter_h, int fiter_w, int distance, Vector2* out_len)
{int h = VALID_out_len(x_h, fiter_h, distance);int w = VALID_out_len(x_w, fiter_w, distance);float** temp = apply2(h, w);float** t = fiter;if (out_len != NULL){out_len->height = h;out_len->width = w;}for (int i = 0; i < x_h + 1 - fiter_h; i += distance)for (int j = 0; j < x_w + 1 - fiter_w; j += distance){float count = 0;for (int n = i; n < i + fiter_h; n++)for (int m = j; m < j + fiter_w; m++){if (n >= x_h || m >= x_w)continue;count += (x[n][m] * t[n - i][m - j]);}temp[(i / distance)][(j / distance)] = count;}//free(t);return temp;
}float** SAME(float** x, int x_h, int x_w, float** fiter, int fiter_h, int fiter_w, int distance, int fill, Vector2* out_len)
{return nullptr;
}int VALID_out_len(int x_len, int fiter_len, int distance)
{float temp = (float)(x_len - fiter_len) / (float)distance;int t = temp - (int)((float)temp) >= 0.5 ? (int)temp + 1 : (int)temp;t++;return t;
}void show_Weight(Weight& W)
{for (int i = 0; i < W.len.height; i++){for (int j = 0; j < W.len.width; j++){printf("%0.3f ", W.WG[i][j]);}puts("");}
}
void rot90(Weight& x)
{int h = x.len.width, w = x.len.height;x.WG = rot90(x.WG, x.len, true);x.len.width = w;x.len.height = h;
}
float** rot90(float** x, Vector2& x_len, bool release)
{float** temp = apply2(x_len.width, x_len.height);for (int i = 0; i < x_len.height; i++)for (int j = 0; j < x_len.width; j++){temp[x_len.width - 1 - j][i] = x[i][j];}if (release){Free2(x, x_len.height);//free(x);}return temp;
}float** rot180(float** x, Vector2& x_len, bool release)
{float** temp = apply2(x_len.height, x_len.width);for (int i = 0; i < x_len.height; i++){for (int j = 0; j < x_len.width; j++){temp[x_len.height - 1 - i][x_len.width - 1 - j] = x[i][j];}}if (release){Free2(x, x_len.height);//free(x);}return temp;
}
void ResizeCImage(CImage& image, int newWidth, int newHeight) {
    // create a new CImage of the target size
    CImage resizedImage;
    resizedImage.Create(newWidth, newHeight, image.GetBPP());
    // stretch-blit the original image into the new one, with scaling
    SetStretchBltMode(resizedImage.GetDC(), HALFTONE);
    image.StretchBlt(resizedImage.GetDC(), 0, 0, newWidth, newHeight);
    // release the new image's device context once drawing is done
    resizedImage.ReleaseDC();
    // move the result back into the original CImage object
    image.Destroy();
    image.Attach(resizedImage.Detach());
    resizedImage.Destroy();
}
float** Get_data_by_Mat(char* filepath, Vector2& out_len)
{CImage mat;// = cv::imread(filepath, 0);//cv::resize(mat, mat, cv::Size(28, 28));mat.Load(filepath);ResizeCImage(mat, 28, 28);/*cv::imshow("tt", mat);cv::waitKey(0);*/out_len.height = mat.GetHeight();out_len.width = mat.GetWidth();float** temp = apply2(mat.GetHeight(), mat.GetWidth());unsigned char* rgb = (unsigned char*)mat.GetBits();int pitch = mat.GetPitch();int hui = 0;int XS = mat.GetBPP()/8;for (int i = 0; i < out_len.height; i++)for (int j = 0; j < out_len.width; j++){hui = 0;for (int kkk = 0; kkk < 3; kkk++){hui += *(rgb + (j * XS) + (i * pitch) + kkk);}hui /= 3;temp[i][j] = ((float)hui / (float)255);//temp[i][j] = 1 - temp[i][j];}mat.Destroy();return temp;
}char** Get_data_by_Mat_char(char* filepath, Vector2& out_len, int threshold)
{CImage mat;mat.Load(filepath);//cv::Mat mat = cv::imread(filepath, 0);out_len.height = mat.GetHeight();out_len.width = mat.GetWidth();char** temp = apply2_char(out_len.height, out_len.width);unsigned char* rgb = (unsigned char*)mat.GetBits();int pitch = mat.GetPitch();int hui = 0;int XS = mat.GetBPP() / 8;for (int i = 0; i < out_len.height; i++)for (int j = 0; j < out_len.width; j++){hui = 0;for (int kkk = 0; kkk < 3; kkk++){hui += *(rgb + (j * XS) + (i * pitch) + kkk);}hui /= 3;temp[i][j] = hui > threshold ? 0 : 1;}mat.Destroy();return temp;
}void Get_data_by_Mat(char* filepath, Weight& w)
{w.WG = Get_data_by_Mat(filepath, w.len);
}Weight Get_data_by_Mat(char* filepath)
{Weight temp;Get_data_by_Mat(filepath, temp);return temp;
}
Vector2::Vector2()
{
    this->height = 0;
    this->width = 0;
}
Vector2::Vector2(int height, int width)
{
    this->height = height;
    this->width = width;
}XML::XML(FILE* fp, char* name, int layer)
{this->fp = fp;this->name = name;this->layer = layer;
}void XML::showchild()
{char reader[500];while (fgets(reader, 500, this->fp)){int len = strlen(reader);int lay = 0;for (; lay < len; lay++){if (reader[lay] != '\t')break;}if (lay == this->layer){if (reader[lay + 1] == '/')continue;char show[500];memset(show, 0, 500);for (int i = lay + 1; i < len - 2; i++){if (reader[i] == '>')break;show[i - lay - 1] = reader[i];}puts(show);}}fseek(this->fp, 0, 0);
}void bit::operator=(int x)
{this->B = x;
}float* reshape(float** x, int h, int w)
{float* temp = (float*)malloc(sizeof(float*) * w * h);int count = 0;for (int i = 0; i < h; i++)for (int j = 0; j < w; j++){temp[count++] = x[i][j];}return temp;
}float* reshape(float** x, Vector2& x_len)
{return reshape(x, x_len.height, x_len.width);
}float* reshape(float*** x, Vector2& x_len, int P, bool releace)
{float* temp = apply1(x_len.height * x_len.width * P);int c = 0;for (int i = 0; i < P; i++)for (int n = 0; n < x_len.height; n++)for (int m = 0; m < x_len.width; m++)temp[c++] = x[i][n][m];if (releace)Free3(x, P, x_len.height);//free(x);return temp;
}int* bList(int distance, int max, int* out_len)
{int num = (max % distance != 0);int t = (int)(max / distance);t += num;if (out_len != NULL)*out_len = t;int* out = (int*)malloc(sizeof(int*) * t);for (int i = 0; i < t; i++){out[i] = i * distance;}return out;
}void Free2(float** x, int h)
{for (int i = 0; i < h; i++)free(x[i]);free(x);
}void Free3(float*** x, int p, int h)
{for (int i = 0; i < p; i++)for (int j = 0; j < h; j++)free(x[i][j]);for (int i = 0; i < p; i++)free(x[i]);free(x);
}void kron(float** out, Vector2& out_len, float** inp, Vector2& inp_len, float** filter, Vector2& filter_len)
{for (int i = 0; i < inp_len.height; i++)for (int j = 0; j < inp_len.width; j++){for (int n = i * 2; n < out_len.height && n < ((i * 2) + filter_len.height); n++)for (int m = (j * 2); m < ((j * 2) + filter_len.width) && m < out_len.width; m++){out[n][m] = inp[i][j] * filter[n - (i * 2)][m - (j * 2)] * 0.25;}}
}char** mnist::Toshape2(char* x, int h, int w)
{char** temp = apply2_char(h, w);int c = 0;for (int i = 0; i < h; i++)for (int j = 0; j < w; j++)temp[i][j] = x[c++];return temp;
}char** mnist::Toshape2(char* x, Vector2& x_len)
{return mnist::Toshape2(x, x_len.height, x_len.width);
}void mnist::Toshape2(char** out, char* x, int h, int w)
{int c = 0;for (int i = 0; i < h; i++)for (int j = 0; j < w; j++)out[i][j] = x[c++];
}void mnist::Toshape2(char** out, char* x, Vector2& x_len)
{mnist::Toshape2(out, x, x_len.height, x_len.width);
}float** mnist::Toshape2_F(char* x, int h, int w)
{float** temp = apply2(h, w);int c = 0;for (int i = 0; i < h; i++)for (int j = 0; j < w; j++)temp[i][j] = ((float)x[c++] / (float)255);return temp;
}float** mnist::Toshape2_F(char* x, Vector2& x_len)
{return mnist::Toshape2_F(x, x_len.height, x_len.width);
}void mnist::Toshape2(float** out, char* x, int h, int w)
{int c = 0;for (int i = 0; i < h; i++)for (int j = 0; j < w; j++)out[i][j] = ((float)x[c++] / (float)255);
}void mnist::Toshape2(float** out, char* x, Vector2& x_len)
{mnist::Toshape2(out, x, x_len.height, x_len.width);
}void mnist::Toshape2(float** out, unsigned char* x, int h, int w)
{int c = 0;for (int i = 0; i < h; i++)for (int j = 0; j < w; j++){out[i][j] = ((float)x[c++] / (float)255);}
}void mnist::Toshape2(float** out, unsigned char* x, Vector2& x_len)
{mnist::Toshape2(out, x, x_len.height, x_len.width);
}float*** mnist::Toshape3(float* x, int P, Vector2& x_len)
{float*** temp = apply3(P, x_len.height, x_len.width);int c = 0;for (int i = 0; i < P; i++)for (int j = 0; j < x_len.height; j++)for (int n = 0; n < x_len.width; n++)temp[i][j][n] = x[c++];return temp;
}int mnist::ReverseInt(int i)
{unsigned char ch1, ch2, ch3, ch4;ch1 = i & 255;ch2 = (i >> 8) & 255;ch3 = (i >> 16) & 255;ch4 = (i >> 24) & 255;return((int)ch1 << 24) + ((int)ch2 << 16) + ((int)ch3 << 8) + ch4;
}
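
ReverseInt() exists because the MNIST IDX files store their 32-bit header fields (magic number, item count, rows, columns) in big-endian byte order, while x86 machines are little-endian. A small standalone check of the byte swap, assuming the standard idx3 image magic number 2051:

#include <cassert>
#include "TP_NNW.h"

// 0x00000803 (2051) on disk is read as 0x03080000 on a little-endian machine.
int main()
{
    int raw = 0x03080000;                       // header bytes as fread() returns them
    assert(mnist::ReverseInt(raw) == 2051);     // magic number of idx3 image files
    return 0;
}
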
#pragma once
#include<Windows.h>
#include<atlimage.h>
#define ALPHA 0.01
#define BETA 0.95
#define RATIO 0.2
void ResizeCImage(CImage& image, int newWidth, int newHeight);
struct bit
{unsigned B : 1;void operator=(int x);
};
enum Conv_flag
{Valid = 0,Same = 1
};
struct Vector2 {
    int height, width;
    Vector2();
    Vector2(int height, int width);
};
class Weight
{
private:
    void apply(int H/*height*/, int W/*width*/);
    void apply(int H/*height*/, int W/*width*/, float(*def)());
public:
    bool fz;
    Vector2 len;
    float** WG;
    ~Weight();
    Weight() { fz = false; }
    Weight(int H/*height*/, int W/*width*/);
    Weight(int H/*height*/, int W/*width*/, float (*def)());
    void re(float* delta, float* inp, float alpha = ALPHA);
    void save(FILE* fp);
    void load(FILE* fp);
    void release();
    void operator>>(Weight& temp);
    void operator+=(Weight& temp);
    //void operator/=(int &temp);
    void operator/=(int temp);
    void operator<<(Weight& temp);
    void friend WD(Weight* WGS, int H, int W, int len);
    void friend WD(Weight* WGS, int H, int W, int len, float(*def)());
};
float zeros();
float ones();
float*** Pool(float*** y, Vector2& inp, int P, Vector2& out);   // pooling layer
float*** Conv(float** X, Vector2& inp, Vector2& out, Weight* W, int W_len);   // convolution layer
void print(float* y, int y_len = 1);
void print(float* y, Vector2& vec);
void print(float** y, Vector2& vec);
void print(char* y, int y_len = 1);
void print(char** y, Vector2& vec);
void print(Weight& w);
void print(Weight* w, int len = 1);
float** apply2(int H/*height*/, int W/*width*/);
float*** apply3(int P, int H/*height*/, int W/*width*/);
char** apply2_char(int H/*height*/, int W/*width*/);
float* apply1(int H);
char* apply1_char(int H);
float Get_rand();
float Sigmoid(float x);
float* Sigmoid(float* x, Weight& w);
float* Sigmoid(float* x, int height);
float ReLU(float x);
float* ReLU(float* x, Weight& w);
float* ReLU(float* x, int height);
float* Softmax(float* x, Weight& w);
float dsigmoid(float x);
float* Softmax(float* x, int height);
float* FXCB_err(Weight& w, float* delta);
float* Delta1(float* y, float* e, Weight& w);
float* Delta2(float* v, float* e, Weight& w);
float* dot(Weight& W/*weights*/, float* inp/*input vector*/, int* len = NULL);
char* randperm(int max, int count);
void Dropout(float* y, float ratio, Weight& w);
float** conv2(float** x, Vector2& x_len, float** fiter, Vector2& fiter_len,Vector2* out_len = NULL, int flag = Valid, int distance = 1, int fill = 0);
float** VALID(float** x, int x_h, int x_w, float** fiter, int fiter_h,int fiter_w, int distance, Vector2* out_len = NULL);
float** SAME(float** x, int x_h, int x_w, float** fiter, int fiter_h,int fiter_w, int distance, int fill, Vector2* out_len = NULL);
int VALID_out_len(int x_len, int fiter_len, int distance);
void show_Weight(Weight& W);
void rot90(Weight& x);
float** rot90(float** x, Vector2& x_len, bool release = false);
float** rot180(float** x, Vector2& x_len, bool release = false);
float** Get_data_by_Mat(char* filepath, Vector2& out_len);
char** Get_data_by_Mat_char(char* filepath, Vector2& out_len, int threshold = 127);
void Get_data_by_Mat(char* filepath, Weight& w);
Weight Get_data_by_Mat(char* filepath);
float* reshape(float** x, int h, int w);
float* reshape(float** x, Vector2& x_len);
float* reshape(float*** x, Vector2& x_len, int P, bool releace = false);
namespace mnist
{
    char** Toshape2(char* x, int h, int w);
    char** Toshape2(char* x, Vector2& x_len);
    void Toshape2(char** out, char* x, int h, int w);
    void Toshape2(char** out, char* x, Vector2& x_len);
    float** Toshape2_F(char* x, int h, int w);
    float** Toshape2_F(char* x, Vector2& x_len);
    void Toshape2(float** out, char* x, int h, int w);
    void Toshape2(float** out, char* x, Vector2& x_len);
    void Toshape2(float** out, unsigned char* x, int h, int w);
    void Toshape2(float** out, unsigned char* x, Vector2& x_len);
    float*** Toshape3(float* x, int P, Vector2& x_len);
    int ReverseInt(int i);
}
struct XML
{char* name;FILE* fp;int layer;XML(FILE* fp, char* name, int layer);void showchild();
};
template<class T>
class hot_one
{
    bool fz;
public:
    T* one;
    int num;
    int count;
    hot_one() { this->fz = false; }
    hot_one(int type_num, int set_num = 0)
    {
        type_num = type_num <= 0 ? 1 : type_num;
        if (set_num >= type_num)
            set_num = 0;
        this->count = type_num;
        this->fz = true;
        this->num = set_num;
        this->one = new T[type_num]{ 0 };
        this->one[set_num] = 1;
    }
    void re(int set_num)
    {
        this->one[num] = 0;
        this->num = set_num;
        this->one[this->num] = 1;
    }
    void release()
    {
        if (this->fz)
            delete[] one;
        this->fz = false;
    }
    ~hot_one()
    {
        this->release();
    }
};
int* bList(int distance, int max, int* out_len);
void Free2(float** x, int h);
void Free3(float*** x, int p, int h);
void kron(float** out, Vector2& out_len, float** inp, Vector2& inp_len, float** filter,Vector2& filter_len);
