48. OAK: Exchanging Detection Data and Images via a Variable-Length Struct in Shared Memory (RapidJSON)

2023-05-16

Basic idea: the goal is to learn how to pass variable-length data through shared memory; the same pattern also applies when C# calls a C++ DLL. The example integrates OAK detection with shared memory and shows how RapidJSON is used for serialization. Extract and adapt the code as you see fit.
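
Before diving into the project, here is a minimal, self-contained sketch of the handshake the code below relies on: the writer fills the buffer and raises iSignal; the reader consumes the payload and lowers it. For illustration the "shared memory" is a struct shared by two threads and the flag is a std::atomic (my addition, for in-process correctness); the real code keeps a plain int inside a System V segment.

#include <atomic>
#include <cstdio>
#include <thread>

struct Slot {
    std::atomic<int> iSignal{0};   // 0 = empty, 1 = full
    int  chLength = 0;             // valid bytes in chBuffer
    char chBuffer[64] = {0};
};

int main() {
    Slot slot;
    std::thread writer([&] {
        for (int i = 0; i < 3; ++i) {
            while (slot.iSignal.load() == 1) {}   // wait until consumed
            int n = std::snprintf(slot.chBuffer, sizeof(slot.chBuffer), "msg %d", i);
            slot.chLength = n;
            slot.iSignal.store(1);                // publish
        }
    });
    std::thread reader([&] {
        for (int i = 0; i < 3; ++i) {
            while (slot.iSignal.load() == 0) {}   // wait until published
            std::printf("got: %.*s\n", slot.chLength, slot.chBuffer);
            slot.iSignal.store(0);                // consume
        }
    });
    writer.join();
    reader.join();
    return 0;
}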

CMakeLists.txt

cmake_minimum_required(VERSION 3.16)
project(depthai)
set(CMAKE_CXX_STANDARD 11)
find_package(OpenCV REQUIRED)
#message(STATUS ${OpenCV_INCLUDE_DIRS})
# add include directories
include_directories(${OpenCV_INCLUDE_DIRS})
include_directories(${CMAKE_SOURCE_DIR}/include/rapidjson)
include_directories(${CMAKE_SOURCE_DIR}/include)
include_directories(${CMAKE_SOURCE_DIR}/include/utility)
# find the depthai SDK, then link OpenCV and depthai into the executable
find_package(depthai CONFIG REQUIRED)
add_executable(depthai main.cpp Write.cpp Write.h Reader.cpp Reader.h common.h OAK.cpp OAK.h)
target_link_libraries(depthai ${OpenCV_LIBS} depthai::opencv -lpthread)

main.cpp


#include "OAK.h"
#include <iostream>
#include <string>
#include <thread>
#include "Reader.h"
#include "Write.h"

void oak0(OAK *oak, WriteMem *writemem, int key_id)
{
    int ok = writemem->init_paramter(key_id);
    if (ok == 0) {
        printf("writer initialized successfully\n");
    }
    oak->detect(writemem);
}


void tcp(ReaderMem *readermem, int key_id)
{
    int ok = readermem->init_paramter(key_id);
    if (ok == 0) {
        printf("reader initialized successfully\n");
    }
    while (true) {
        readermem->read_data();
    }
}

int main(int argc, char **argv) {
    // initialize the OAK pipeline
    OAK *oak = new OAK();
    std::string nnPath("../frozen_darknet_yolov4_model.blob");

    int key_id = 3546;
    oak->initial(nnPath);

    WriteMem *writemem = new WriteMem();
    ReaderMem *readermem = new ReaderMem();

    std::thread thread1(oak0, oak, writemem, key_id);
    std::thread thread2(tcp, readermem, key_id);
    thread1.join();
    // note: tcp() loops forever, so thread2.join() only returns if the
    // process is interrupted; see the shutdown sketch below
    thread2.join();

    writemem->clean_data();
    readermem->clean_data();
    delete oak;       // release the OAK pipeline
    delete writemem;
    delete readermem;

    return 0;
}
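
One thing to note about main(): because tcp() never returns, the cleanup calls after thread2.join() are unreachable. A minimal sketch of one fix (the g_running flag and these names are my additions, not part of the original code): share an atomic stop flag between the loops and clear it when detect() sees 'q'.

#include <atomic>
#include <chrono>
#include <cstdio>
#include <thread>

std::atomic<bool> g_running{true};

void reader_loop() {
    while (g_running.load()) {           // instead of while (true)
        // readermem->read_data();       // poll shared memory here
        std::this_thread::sleep_for(std::chrono::milliseconds(1));
    }
    std::printf("reader stopped\n");
}

int main() {
    std::thread t(reader_loop);
    std::this_thread::sleep_for(std::chrono::milliseconds(50));
    g_running.store(false);              // what detect() would do on 'q'
    t.join();                            // now join() actually returns
    return 0;
}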

common.h

//
// Created by ubuntu on 2022/11/15.
//

#ifndef DEPTHAI_COMMON_H
#define DEPTHAI_COMMON_H

#include "rapidjson/document.h"
#include "rapidjson/writer.h"
#include "rapidjson/stringbuffer.h"

using namespace rapidjson;

#define SHARE_MEMORY_BUFFER_LEN 1024
#define IMAGE_SIZE 3*416*416

struct stuShareMemory {
    int iSignal;
    char chBuffer[SHARE_MEMORY_BUFFER_LEN];
    int chLength;
    char pixel[IMAGE_SIZE];
    int pixel_rows;
    int pixel_cols;
    int pixel_channels;

    stuShareMemory() {
        iSignal = 0;
        chLength = 0;
        pixel_rows = 0;
        pixel_cols = 0;
        pixel_channels = 0;
        memset(chBuffer, 0, SHARE_MEMORY_BUFFER_LEN);
        memset(pixel, 0, IMAGE_SIZE);

    };
};


#endif //DEPTHAI_COMMON_H
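
A small standalone check (a sketch; it only assumes common.h and RapidJSON are on the include path) makes the layout concrete: the struct itself is fixed-size, and the "variable length" part is just how many of its bytes the writer declares valid.

#include <cstdio>
#include <cstring>
#include "common.h"

int main() {
    // static: the pixel array alone is ~507 KB, better kept off the stack
    static stuShareMemory stu;

    // this is the size the shmget() calls below must be able to hold
    std::printf("sizeof(stuShareMemory) = %zu bytes\n", sizeof(stuShareMemory));

    const char *json = "[{\"x1\":30.0,\"y1\":266.0}]";
    size_t n = std::strlen(json);
    if (n <= SHARE_MEMORY_BUFFER_LEN) {   // same guard write_data() needs
        std::memcpy(stu.chBuffer, json, n);
        stu.chLength = (int)n;
    }
    std::printf("stored %d bytes: %.*s\n", stu.chLength, stu.chLength, stu.chBuffer);
    return 0;
}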

OAK.h



#ifndef DEPTHAI_OAK_H
#define DEPTHAI_OAK_H
#include <atomic>
#include <chrono>

#include "utility.hpp"

#include "depthai/depthai.hpp"
#include "Write.h"

#include "rapidjson/document.h"
#include "rapidjson/writer.h"
#include "rapidjson/stringbuffer.h"
using namespace rapidjson;
using namespace std;
using namespace std::chrono;



class OAK {
public:
    OAK();

    ~OAK();

public:
    int initial(std::string nnPath);
    int detect(WriteMem *writemem);

private:
    // Create pipeline
    dai::Pipeline pipeline;
    std::atomic<bool> syncNN{true};
    std::shared_ptr<dai::DataOutputQueue> previewQueue;
    std::shared_ptr<dai::DataOutputQueue> detectionNNQueue;
    std::shared_ptr<dai::DataOutputQueue> xoutBoundingBoxDepthMappingQueue;
    std::shared_ptr<dai::DataOutputQueue> depthQueue;
    std::shared_ptr<dai::DataOutputQueue> networkQueue;
    cv::Scalar color = cv::Scalar(255, 255, 255);
    bool printOutputLayersOnce = true;
    const std::vector<std::string> labelMap = {
            "person",        "bicycle",      "car",           "motorbike",     "aeroplane",   "bus",         "train",       "truck",        "boat",
            "traffic light", "fire hydrant", "stop sign",     "parking meter", "bench",       "bird",        "cat",         "dog",          "horse",
            "sheep",         "cow",          "elephant",      "bear",          "zebra",       "giraffe",     "backpack",    "umbrella",     "handbag",
            "tie",           "suitcase",     "frisbee",       "skis",          "snowboard",   "sports ball", "kite",        "baseball bat", "baseball glove",
            "skateboard",    "surfboard",    "tennis racket", "bottle",        "wine glass",  "cup",         "fork",        "knife",        "spoon",
            "bowl",          "banana",       "apple",         "sandwich",      "orange",      "broccoli",    "carrot",      "hot dog",      "pizza",
            "donut",         "cake",         "chair",         "sofa",          "pottedplant", "bed",         "diningtable", "toilet",       "tvmonitor",
            "laptop",        "mouse",        "remote",        "keyboard",      "cell phone",  "microwave",   "oven",        "toaster",      "sink",
            "refrigerator",  "book",         "clock",         "vase",          "scissors",    "teddy bear",  "hair drier",  "toothbrush"};

private:
    WriteMem *write;

};


#endif //DEPTHAI_OAK_H

OAK.cpp


#include "OAK.h"


OAK::OAK() {


}

OAK::~OAK() {

}

int OAK::initial(std::string nnPath) {

    // Define sources and outputs
    auto camRgb = pipeline.create<dai::node::ColorCamera>();
    auto spatialDetectionNetwork = pipeline.create<dai::node::YoloSpatialDetectionNetwork>();
    auto monoLeft = pipeline.create<dai::node::MonoCamera>();
    auto monoRight = pipeline.create<dai::node::MonoCamera>();
    auto stereo = pipeline.create<dai::node::StereoDepth>();

    auto xoutRgb = pipeline.create<dai::node::XLinkOut>();
    auto xoutNN = pipeline.create<dai::node::XLinkOut>();
    auto xoutBoundingBoxDepthMapping = pipeline.create<dai::node::XLinkOut>();
    auto xoutDepth = pipeline.create<dai::node::XLinkOut>();
    auto nnNetworkOut = pipeline.create<dai::node::XLinkOut>();

    xoutRgb->setStreamName("rgb");
    xoutNN->setStreamName("detections");
    xoutBoundingBoxDepthMapping->setStreamName("boundingBoxDepthMapping");
    xoutDepth->setStreamName("depth");
    nnNetworkOut->setStreamName("nnNetwork");

    // Properties
    camRgb->setPreviewSize(416, 416);
    camRgb->setResolution(dai::ColorCameraProperties::SensorResolution::THE_1080_P);
    camRgb->setInterleaved(false);
    camRgb->setColorOrder(dai::ColorCameraProperties::ColorOrder::BGR);

    monoLeft->setResolution(dai::MonoCameraProperties::SensorResolution::THE_400_P);
    monoLeft->setBoardSocket(dai::CameraBoardSocket::LEFT);
    monoRight->setResolution(dai::MonoCameraProperties::SensorResolution::THE_400_P);
    monoRight->setBoardSocket(dai::CameraBoardSocket::RIGHT);

    // setting node configs
    stereo->setDefaultProfilePreset(dai::node::StereoDepth::PresetMode::HIGH_DENSITY);
    // Align depth map to the perspective of RGB camera, on which inference is done
    stereo->setDepthAlign(dai::CameraBoardSocket::RGB);
    stereo->setOutputSize(monoLeft->getResolutionWidth(), monoLeft->getResolutionHeight());

    spatialDetectionNetwork->setBlobPath(nnPath);
    spatialDetectionNetwork->setConfidenceThreshold(0.5f);
    spatialDetectionNetwork->input.setBlocking(false);
    spatialDetectionNetwork->setBoundingBoxScaleFactor(0.5);
    spatialDetectionNetwork->setDepthLowerThreshold(100);
    spatialDetectionNetwork->setDepthUpperThreshold(5000);

    // yolo specific parameters
    spatialDetectionNetwork->setNumClasses(80);
    spatialDetectionNetwork->setCoordinateSize(4);
    spatialDetectionNetwork->setAnchors({10, 14, 23, 27, 37, 58, 81, 82, 135, 169, 344, 319});
    spatialDetectionNetwork->setAnchorMasks({{"side26", {1, 2, 3}},
                                             {"side13", {3, 4, 5}}});
    spatialDetectionNetwork->setIouThreshold(0.5f);

    // Linking
    monoLeft->out.link(stereo->left);
    monoRight->out.link(stereo->right);

    camRgb->preview.link(spatialDetectionNetwork->input);
    if (syncNN) {
        spatialDetectionNetwork->passthrough.link(xoutRgb->input);
    } else {
        camRgb->preview.link(xoutRgb->input);
    }

    spatialDetectionNetwork->out.link(xoutNN->input);
    spatialDetectionNetwork->boundingBoxMapping.link(xoutBoundingBoxDepthMapping->input);

    stereo->depth.link(spatialDetectionNetwork->inputDepth);
    spatialDetectionNetwork->passthroughDepth.link(xoutDepth->input);
    spatialDetectionNetwork->outNetwork.link(nnNetworkOut->input);

    return 0;
}

int OAK::detect(WriteMem *writemem) {

    // Connect to device and start the pipeline
    dai::Device device(pipeline);

    // Output queues will be used to get the rgb frames and nn data from the outputs defined above
    previewQueue = device.getOutputQueue("rgb", 4, false);
    detectionNNQueue = device.getOutputQueue("detections", 4, false);
    xoutBoundingBoxDepthMappingQueue = device.getOutputQueue("boundingBoxDepthMapping", 4, false);
    depthQueue = device.getOutputQueue("depth", 4, false);
    networkQueue = device.getOutputQueue("nnNetwork", 4, false);


    while (true) {
        rapidjson::Document doc;
        doc.SetArray();
        rapidjson::Document::AllocatorType &allocator = doc.GetAllocator();

        auto imgFrame = previewQueue->get<dai::ImgFrame>();
        auto inDet = detectionNNQueue->get<dai::SpatialImgDetections>();
        auto depth = depthQueue->get<dai::ImgFrame>();
        auto inNN = networkQueue->get<dai::NNData>();

        if (printOutputLayersOnce && inNN) {
            std::cout << "Output layer names: ";
            for (const auto &ten : inNN->getAllLayerNames()) {
                std::cout << ten << ", ";
            }
            std::cout << std::endl;
            printOutputLayersOnce = false;
        }

        cv::Mat frame = imgFrame->getCvFrame();
        cv::Mat depthFrame = depth->getFrame();  // depthFrame values are in millimeters

        cv::Mat depthFrameColor;
        cv::normalize(depthFrame, depthFrameColor, 255, 0, cv::NORM_INF, CV_8UC1);
        cv::equalizeHist(depthFrameColor, depthFrameColor);
        cv::applyColorMap(depthFrameColor, depthFrameColor, cv::COLORMAP_HOT);


        auto detections = inDet->detections;
        if (!detections.empty()) {
            auto boundingBoxMapping = xoutBoundingBoxDepthMappingQueue->get<dai::SpatialLocationCalculatorConfig>();
            auto roiDatas = boundingBoxMapping->getConfigData();

            for (auto roiData : roiDatas) {
                auto roi = roiData.roi;
                roi = roi.denormalize(depthFrameColor.cols, depthFrameColor.rows);
                auto topLeft = roi.topLeft();
                auto bottomRight = roi.bottomRight();
                auto xmin = (int) topLeft.x;
                auto ymin = (int) topLeft.y;
                auto xmax = (int) bottomRight.x;
                auto ymax = (int) bottomRight.y;

                cv::rectangle(depthFrameColor, cv::Rect(cv::Point(xmin, ymin), cv::Point(xmax, ymax)), color,
                              cv::FONT_HERSHEY_SIMPLEX);
            }
        }
        //rapidjson::Document sub_doc;
        // sub_doc.SetObject();
        for (const auto &detection : detections) {
            int x1 = detection.xmin * frame.cols;
            int y1 = detection.ymin * frame.rows;
            int x2 = detection.xmax * frame.cols;
            int y2 = detection.ymax * frame.rows;
            uint32_t labelIndex = detection.label;

            if (labelIndex == 0) {  // only publish the "person" class
                rapidjson::Document item_doc;
                item_doc.SetObject();
                std::string labelStr = to_string(labelIndex);
                if (labelIndex < labelMap.size()) {
                    labelStr = labelMap[labelIndex];
                }


                item_doc.AddMember("x1", x1 * 1.0, allocator);
                item_doc.AddMember("y1", y1 * 1.0, allocator);
                item_doc.AddMember("x2", x2 * 1.0, allocator);
                item_doc.AddMember("y2", y2 * 1.0, allocator);
                item_doc.AddMember("c", detection.confidence * 1.0, allocator);
                item_doc.AddMember("d", (int) detection.spatialCoordinates.z * 1.0, allocator);
                doc.PushBack(item_doc, allocator);

                cv::putText(frame, labelStr, cv::Point(x1 + 10, y1 + 20), cv::FONT_HERSHEY_TRIPLEX, 0.5, 255);
                std::stringstream confStr;
                confStr << std::fixed << std::setprecision(2) << detection.confidence * 100;
                cv::putText(frame, confStr.str(), cv::Point(x1 + 10, y1 + 35), cv::FONT_HERSHEY_TRIPLEX, 0.5, 255);

                std::stringstream depthX;
                depthX << "X: " << (int) detection.spatialCoordinates.x << " mm";
                cv::putText(frame, depthX.str(), cv::Point(x1 + 10, y1 + 50), cv::FONT_HERSHEY_TRIPLEX, 0.5, 255);
                std::stringstream depthY;
                depthY << "Y: " << (int) detection.spatialCoordinates.y << " mm";
                cv::putText(frame, depthY.str(), cv::Point(x1 + 10, y1 + 65), cv::FONT_HERSHEY_TRIPLEX, 0.5, 255);
                std::stringstream depthZ;
                depthZ << "Z: " << (int) detection.spatialCoordinates.z << " mm";
                cv::putText(frame, depthZ.str(), cv::Point(x1 + 10, y1 + 80), cv::FONT_HERSHEY_TRIPLEX, 0.5, 255);

                cv::rectangle(frame, cv::Rect(cv::Point(x1, y1), cv::Point(x2, y2)), color, cv::FONT_HERSHEY_SIMPLEX);
            }

        }


        rapidjson::StringBuffer buffer;
        rapidjson::Writer<rapidjson::StringBuffer> write_json(buffer);
        doc.Accept(write_json);
        std::string buf_json_str = buffer.GetString();
        allocator.Clear();

        writemem->write_data(buf_json_str,frame);



        // cv::imshow("depth", depthFrameColor);
        cv::imshow("rgb", frame);

        int key = cv::waitKey(1);
        if (key == 'q' || key == 'Q') {
            return 0;
        }
    }
    return 0;
}
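
The JSON built in detect() is a flat array of objects keyed x1/y1/x2/y2/c/d. As a standalone illustration (a sketch, independent of the OAK code; the sample numbers come from the test output below), here is the same RapidJSON round trip in isolation:

#include <cstdio>
#include "rapidjson/document.h"
#include "rapidjson/stringbuffer.h"
#include "rapidjson/writer.h"

int main() {
    // Build the same array-of-detections layout that detect() writes.
    rapidjson::Document doc;
    doc.SetArray();
    rapidjson::Document::AllocatorType &alloc = doc.GetAllocator();

    rapidjson::Value item(rapidjson::kObjectType);
    item.AddMember("x1", 30.0, alloc);
    item.AddMember("y1", 266.0, alloc);
    item.AddMember("x2", 125.0, alloc);
    item.AddMember("y2", 414.0, alloc);
    item.AddMember("c", 0.777, alloc);
    item.AddMember("d", 1530.0, alloc);
    doc.PushBack(item, alloc);

    rapidjson::StringBuffer buffer;
    rapidjson::Writer<rapidjson::StringBuffer> writer(buffer);
    doc.Accept(writer);
    std::printf("%s\n", buffer.GetString());

    // Parse it back, as read_data() does on the reader side.
    rapidjson::Document parsed;
    if (!parsed.Parse(buffer.GetString()).HasParseError() && parsed.IsArray()) {
        for (rapidjson::SizeType i = 0; i < parsed.Size(); ++i)
            std::printf("x1=%.2f d=%.2f\n",
                        parsed[i]["x1"].GetDouble(), parsed[i]["d"].GetDouble());
    }
    return 0;
}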

Reader.h


#ifndef DEPTHAI_READER_H
#define DEPTHAI_READER_H
#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
#include <sys/types.h>
#include <sys/shm.h>
#include <string.h>
#include <signal.h>
#include "iostream"
#include "common.h"
#include "opencv2/core.hpp"
#include "opencv2/highgui.hpp"
#define SHARE_MEMORY_BUFFER_LEN 1024

class ReaderMem {
public:
    ReaderMem();
    ~ReaderMem();

public:
    int init_paramter(int key_id);
    int read_data();
    void clean_data();
private:
    rapidjson::Document document;
    rapidjson::Value DetectObj;
    struct stuShareMemory *stu = NULL;
    void *shm = NULL;
    int shmid = -1;



};
#endif //DEPTHAI_READER_H

Reader.cpp



#include "Reader.h"

ReaderMem::ReaderMem(){}
ReaderMem::~ReaderMem(){}

int ReaderMem::init_paramter(int key_id) {

    printf("ReaderMem %d\n", key_id);
    shmid = shmget((key_t)key_id, sizeof(struct stuShareMemory), 0666 | IPC_CREAT);
    if (shmid == -1) {
        printf("shmget err.\n");
        return -1;
    }

    shm = shmat(shmid, (void *)0, 0);
    if (shm == (void *)-1) {
        printf("shmat err.\n");
        return -1;
    }

    stu = (struct stuShareMemory *)shm;

    return 0;
}

int ReaderMem::read_data() {

    if (stu->iSignal != 0) {
        std::string acceptMem = stu->chBuffer;
        std::string newMem = acceptMem.substr(0, stu->chLength);
        printf("-----------reader------------ \n%s\n", newMem.c_str());

        // rebuild the image from the raw bytes and the stored shape
        cv::Mat cvoutImg(stu->pixel_rows, stu->pixel_cols, CV_8UC3, cv::Scalar(255, 255, 255));
        memcpy((char *)cvoutImg.data, stu->pixel, stu->pixel_rows * stu->pixel_cols * stu->pixel_channels);
        cv::imshow("read", cvoutImg);
        cv::waitKey(1);

        if (!document.Parse(newMem.c_str()).HasParseError()) {
            rapidjson::Value &json_array = document;

            for (rapidjson::SizeType i = 0; i < json_array.Size(); i++) {
                rapidjson::Value &json_obj = json_array[i];
                if (json_obj.IsObject() && json_obj.HasMember("x1")) {
                    // print x1, y1, x2, c and d; as in the original code,
                    // "y2" is written by the producer but never read here
                    static const char *keys[] = {"x1", "y1", "x2", "c", "d"};
                    for (const char *key : keys) {
                        if (json_obj.HasMember(key) && json_obj[key].IsFloat()) {
                            printf("%.2f\n", json_obj[key].GetFloat());
                        }
                    }
                }
            }
        }

        stu->iSignal = 0;  // mark the slot as consumed
        return 0;
    }

    return -1;
}

void ReaderMem::clean_data() {
    shmdt(shm);
    shmctl(shmid, IPC_RMID, 0);  // the reader side removes the segment
}
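
Both init_paramter() implementations receive the hard-coded key 3546 from main(). As an aside (a sketch, not used in this post's code), System V keys are often derived with ftok() from a path both processes can see:

#include <cstdio>
#include <sys/ipc.h>
#include <sys/shm.h>

int main() {
    // derive the key from a shared path instead of a magic constant
    key_t key = ftok("/tmp", 'D');
    if (key == -1) { perror("ftok"); return 1; }

    int shmid = shmget(key, 4096, 0666 | IPC_CREAT);
    if (shmid == -1) { perror("shmget"); return 1; }

    void *shm = shmat(shmid, nullptr, 0);
    if (shm == (void *)-1) { perror("shmat"); return 1; }

    std::printf("attached segment %d at %p\n", shmid, shm);
    shmdt(shm);
    shmctl(shmid, IPC_RMID, nullptr);
    return 0;
}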

Write.h



#ifndef DEPTHAI_WRITE_H
#define DEPTHAI_WRITE_H
#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
#include <sys/types.h>
#include <sys/shm.h>
#include <string.h>
#include <signal.h>
#include "iostream"
using namespace std;
#define SHARE_MEMORY_BUFFER_LEN 1024
#include "common.h"
#include "opencv2/core.hpp"
#include "opencv2/highgui.hpp"


class WriteMem {
public:
    WriteMem();
    ~WriteMem();

public:
    int init_paramter(int key_id);
    int write_data(std::string content,cv::Mat frame);
    void clean_data();
private:
    struct stuShareMemory *stu = NULL;
    void *shm = NULL;
};


#endif //DEPTHAI_WRITE_H

Write.cpp



#include "Write.h"

WriteMem::WriteMem() {}

WriteMem::~WriteMem() {}

int WriteMem::init_paramter(int key_id) {
    printf("WriteMem %d\n",key_id);
    int shmid = shmget((key_t) key_id, sizeof(struct stuShareMemory), 0666 | IPC_CREAT);
    if (shmid == -1) {
        printf("shmget err.\n");
        return -1;
    }

    shm = shmat(shmid, (void *) 0, 0);
    if (shm == (void *) -1) {
        printf("shmat err.\n");
        return -1;
    }

    stu = (struct stuShareMemory *) shm;

    stu->iSignal = 0;

    return 0;
}

int WriteMem::write_data(std::string content, cv::Mat frame) {

    printf("-----------write------------\n %s \n", content.c_str());

    if (stu->iSignal != 1) {
        // guard both copies so an oversized payload cannot overrun the segment
        if (content.size() > SHARE_MEMORY_BUFFER_LEN ||
            frame.total() * frame.channels() > IMAGE_SIZE) {
            return -1;
        }
        stu->chLength = content.size();
        memcpy(stu->chBuffer, content.c_str(), stu->chLength);

        stu->pixel_rows = frame.rows;
        stu->pixel_cols = frame.cols;
        stu->pixel_channels = frame.channels();
        memcpy(stu->pixel, (char *)frame.data, stu->pixel_rows * stu->pixel_cols * stu->pixel_channels);

        stu->iSignal = 1;  // publish: the reader may now consume
        return 0;
    }
    return -1;
}

void WriteMem::clean_data() {
    shmdt(shm);  // detach only; the reader removes the segment with IPC_RMID

    std::cout << "end progress." << endl;
}

Test output (read_data() prints x1, y1, x2, c and d for each detection; y2 is never read back):

-----------write------------
 [{"x1":30.0,"y1":266.0,"x2":125.0,"y2":414.0,"c":0.7772049903869629,"d":1530.0},{"x1":154.0,"y1":254.0,"x2":249.0,"y2":332.0,"c":0.6232306957244873,"d":3709.0},{"x1":0.0,"y1":309.0,"x2":38.0,"y2":415.0,"c":0.5261836051940918,"d":3442.0}] 
-----------reader------------ 
[{"x1":30.0,"y1":266.0,"x2":125.0,"y2":414.0,"c":0.7772049903869629,"d":1530.0},{"x1":154.0,"y1":254.0,"x2":249.0,"y2":332.0,"c":0.6232306957244873,"d":3709.0},{"x1":0.0,"y1":309.0,"x2":38.0,"y2":415.0,"c":0.5261836051940918,"d":3442.0}]
30.00
266.00
125.00
0.78
1530.00
154.00
254.00
249.00
0.62
3709.00
0.00
309.00
38.00
0.53
3442.00
-----------write------------
 [{"x1":29.0,"y1":267.0,"x2":121.0,"y2":415.0,"c":0.8682379722595215,"d":1765.0},{"x1":156.0,"y1":254.0,"x2":252.0,"y2":333.0,"c":0.605661153793335,"d":3693.0}] 
-----------reader------------ 
[{"x1":29.0,"y1":267.0,"x2":121.0,"y2":415.0,"c":0.8682379722595215,"d":1765.0},{"x1":156.0,"y1":254.0,"x2":252.0,"y2":333.0,"c":0.605661153793335,"d":3693.0}]
29.00
267.00
121.00
0.87
1765.00
156.00
254.00
252.00
0.61
3693.00

Reference:

OpenCV CEO Teaches You OAK (Part 4): Creating Complex Pipelines - Zhihu
