@Jomy10
Last active January 8, 2024 15:30

Revisions

  1. Jomy10 revised this gist Jan 8, 2024. 1 changed file with 0 additions and 18 deletions.
    18 changes: 0 additions & 18 deletions main.cpp
    @@ -413,31 +413,13 @@ class CameraManager {
    }
    };

    #include <libyuv/convert_argb.h>
    #include <stdio.h>
    static void* screen_buffer;
    void onFrame(void* frame, int stride, size_t size) {
    char out[128];
    snprintf(out, 128, "On frame %p, %i, %zu", frame, stride, size);
    Log::info(out);
    memcpy(screen_buffer, frame, stride * screen_height); //screen_width * screen_height * 2);
    // FILE* f = fopen("test", "wb");
    // //std::cout << stride;
    // std::cout << screen_width * screen_height * 2 << std::endl;
    // if (int byteCount = fwrite(frame, 1, screen_width * screen_height * 2, f); byteCount != screen_width * screen_height * 2) {
    // std::cerr << "Error writing file; " << byteCount << " bytes written (" << strerror(errno) << ")" << std::endl;
    // };
    // fclose(f);
    // exit(1);
    return;
    libyuv::YUY2ToARGB(
    (const uint8_t*) frame,
    stride,
    (uint8_t*) screen_buffer,
    screen_width * screen_bytes,
    screen_width,
    screen_height
    );
    }

    #include <linux/fb.h>
  2. Jomy10 revised this gist Jan 8, 2024. 1 changed file with 498 additions and 37 deletions.
    535 changes: 498 additions & 37 deletions main.cpp
    @@ -1,8 +1,36 @@
    #include <exception>
    #include <iostream>

    #include <cerrno>
    #include <cstring>
    #include <libcamera/libcamera.h>
    #include <iostream>
    #include <memory>
    #include <stdexcept>
    #include <exception>
    #include <string>
    #include <stdint.h>

    static int screen_width;
    static int screen_height;
    static int screen_bytes;

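    // Turns whatever was thrown (std::exception, const char*, std::string, ...) into a printable message.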
    static const char* eptr_to_string(std::exception_ptr eptr) {
    try {
    if (eptr) {
    std::rethrow_exception((eptr));
    }
    } catch(const std::exception& e) {
    return e.what();
    } catch (const char* e) {
    return e;
    } catch (std::string& e) {
    return e.c_str();
    } catch(...) {
    return "Unknown error";
    }

    return "Not an error";
    }

    // LOG //

    namespace Log {
    void log(std::string level, std::string msg) {
    @@ -26,56 +54,489 @@ namespace Log {
    }
    }

    class CameraManager {
    // LOG //

    // MEM MAP //
    #include <tuple>
    #include <unistd.h>
    #include <unordered_map>
    #include <vector>
    #include <sys/mman.h>

    struct MappedPlane {
    int fd;
    size_t offset;
    size_t len;
    };

    struct MapInfo {
    /// Maximum offset used by data planes
    unsigned int mappedLen;
    /// Total file descriptor size
    unsigned int totalLen;
    };

    class MemoryMappedFrameBuffer {
    public:
    libcamera::CameraManager
    // maps plane fd to memory location and size
    std::unordered_map<int32_t, std::tuple<void*, size_t>> mmaps;
    const libcamera::FrameBuffer* fb;

    MemoryMappedFrameBuffer(libcamera::FrameBuffer const* fb) {
    auto planes = std::vector<MappedPlane>();
    auto mapInfo = std::unordered_map<int32_t, MapInfo>();
    this->fb = fb;

    enum MsgType {
    Timeout,
    Quit,
    RequestComplete
    };
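    // Record each plane's fd, offset and length, grouping planes by dmabuf fd so that every fd is mapped only once.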
    const std::vector<libcamera::FrameBuffer::Plane>& fbplanes = fb->planes();
    for (int i = 0; i < fbplanes.size(); i++) {
    Log::verbose("[MapInfo] Processing plane " + std::to_string(i) + " for frame buffer " + std::to_string(reinterpret_cast<intptr_t>(fb)));
    auto plane = &fbplanes[i];
    int fd = plane->fd.get();
    planes.push_back((MappedPlane){fd, plane->offset, plane->length});

    if (!mapInfo.contains(fd)) {
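    // Seeking to the end of the dmabuf fd gives the total size of its backing buffer.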
    auto totalLen = lseek64(fd, 0, SEEK_END);
    mapInfo[fd] = (MapInfo) {
    .mappedLen = 0,
    .totalLen = static_cast<unsigned int>(totalLen),
    };
    }

    auto& info = mapInfo[fd];

    if (plane->offset + plane->length > info.totalLen) {
    throw "Plane out of bounds";
    }

    info.mappedLen = std::max(info.mappedLen, plane->offset + plane->length);
    }

    this->mmaps = std::unordered_map<int32_t, std::tuple<void*, size_t>>();
    for (auto& it: mapInfo) {
    const int32_t& fd = it.first;
    const MapInfo& info = it.second;
    Log::verbose("[Memory map] Processing plane with fd " + std::to_string(fd) + " for frame buffer " + std::to_string(reinterpret_cast<intptr_t>(fb)));

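    // Map each dmabuf once, read-only; every plane that uses this fd falls inside the mapped range.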
    void* addr = mmap64(NULL, info.mappedLen, PROT_READ, MAP_PRIVATE, fd, 0);

    if (addr == MAP_FAILED) {
    throw "Memory map error; mapping plane " + std::to_string(fd) + ": " + strerror(errno);
    } else {
    mmaps[fd] = std::make_tuple(addr, info.mappedLen);
    Log::info("Mapped plane " + std::to_string(fd) + " of framebuffer " + std::to_string(reinterpret_cast<intptr_t>(fb)));
    }
    }
    }

    void openCamera() {
    Log::info("Opening camera...");
    ~MemoryMappedFrameBuffer() {
    for (auto& it: this->mmaps) {
    std::tuple<void*, size_t>& first = it.second;
    void* ptr = std::get<void*>(first);
    size_t size = std::get<size_t>(first);
    munmap(ptr, size);
    }
    }
    };

    static void event_loop(CameraManager& mgr) {
    mgr.openCamera();
    mgr.configureViewFinder();
    mgr.startCamera();
    for (unsigned int count = 0; count < 1000; count++) {
    CameraManager::Msg msg = mgr.wait();
    if (msg.type == CameraManager::MsgType::Timeout) {
    Log::err("Device timeout detected, attempting to restart")
    mgr.stopCamera();
    mgr.startCamera();
    continue;
    // MEM MAP //

    #define PF_YUYV 0x56595559
    #define PF_RGBA 0x41424752

    #define PIXEL_FORMAT_YUYV libcamera::PixelFormat(PF_YUYV, 0)
    #define PIXEL_FORMAT_RGBA libcamera::PixelFormat(PF_RGBA, 0)
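    // PF_YUYV and PF_RGBA are the fourcc codes 'YUYV' and 'RGBA' written out as little-endian 32-bit ASCII.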

    typedef std::vector<std::shared_ptr<libcamera::Camera>> const& CameraList;
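    // Frame callback: pointer to the start of the mapped frame, line stride in bytes, and size of the mapping.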
    typedef void(*OnFrameCallback)(void*, int stride, size_t size);

    class CameraManager {
    public:
    std::unique_ptr<libcamera::CameraManager> mgr;
    std::shared_ptr<libcamera::Camera> camera;
    libcamera::Size size;
    unsigned int viewfinderStride;
    uint32_t pixelFormat;
    std::unique_ptr<libcamera::CameraConfiguration> cfgs;

    libcamera::FrameBufferAllocator* fba;
    std::vector<MemoryMappedFrameBuffer> memoryMappedFrameBuffers;
    std::vector<std::unique_ptr<libcamera::Request>> reqs;

    bool previewStarted = false;
    bool cancelRequests = false;
    OnFrameCallback onViewFrame;

    CameraManager(int cameraId, unsigned int width, unsigned int height, OnFrameCallback onFrame) {
    this->mgr = std::make_unique<libcamera::CameraManager>();
    this->mgr->start();

    Log::info("Libcamera version: " + this->mgr->version());

    CameraList cameras = mgr->cameras();
    Log::info("Found " + std::to_string(cameras.size()) + " cameras!");

    if (cameras.size() == 0) {
    throw std::runtime_error("No cameras connected");
    }

    if (cameraId >= cameras.size()) {
    throw std::runtime_error("Selected camera is not conected");
    }

    std::string const& camId = cameras[cameraId]->id();
    this->camera = this->mgr->get(camId);
    if (this->camera->acquire()) {
    throw std::runtime_error("Failed to acquire camera " + camId);
    }
    if (msg.type == CameraManager::MsgType::Quit) {
    return;
    } else if (msg.type != CameraManager::MsgType::RequestComplete) {
    throw std::runtime_error("unrecognized message!");

    Log::info("Camera acquired (" + camId + ")");

    libcamera::ControlList camProps = this->camera->properties();
    std::optional<std::string> model = camProps.get(libcamera::properties::Model);
    if (model.has_value()) {
    Log::info("Using camera: " + *model);
    } else {
    Log::warn("Could not get camera model");
    }

    CompletedRequestPtr& completedRequest = std::get<CompletedRequestPtr>(msg.payload);
    this->cfgs = this->camera->generateConfiguration({libcamera::StreamRole::Viewfinder});
    libcamera::StreamConfiguration& viewfinderConfig = this->cfgs->at(0);
    viewfinderConfig.pixelFormat = PIXEL_FORMAT_RGBA;
    viewfinderConfig.size.width = width;
    viewfinderConfig.size.height = height;

    if (mgr.viewfinderStream()) {
    Log::info("Viewfinder frame " + std::to_string(count));
    // AF?
    mgr.showPreview(completedRequest, mgr.viewfinderStream());
    auto status = this->cfgs->validate();
    switch (status) {
    case libcamera::CameraConfiguration::Status::Valid:
    Log::info("Camera configuration is valid");
    break;
    case libcamera::CameraConfiguration::Status::Adjusted:
    Log::warn("Camera configuration was adjusted");
    break;
    case libcamera::CameraConfiguration::Status::Invalid:
    throw "Camera configuration is invalid";
    }

    auto adjustedPixelFormat = this->cfgs->at(0).pixelFormat.fourcc();
    switch (adjustedPixelFormat) {
    case PF_YUYV:
    Log::info("Pixel format of viewfinder is YUYV");
    break;
    case PF_RGBA:
    Log::info("Pixel format of viewfinder is RGBA");
    break;
    default:
    char out[128];
    std::snprintf(out, 128, "Invalid pixel format for view finder %u", adjustedPixelFormat);
    throw out;
    }
    this->pixelFormat = adjustedPixelFormat;

    auto adjustedSize = this->cfgs->at(0).size;
    if (adjustedSize.width != width || adjustedSize.height != height) {
    throw "Invalid size";
    }
    this->size = adjustedSize;

    //this->viewfinderStream = cfgs->at(0).stream();

    if (int code = this->camera->configure(&(*cfgs))) {
    switch (code) {
    case -ENODEV:
    throw "Failed to configure camera: The camera has been disconnected from the system";
    case -EACCES:
    throw "Faield to configure camera: The camera is not in a state where it can be configured";
    case -EINVAL:
    throw "Failed to configure camera: The configuration is invalid";
    default:
    char out[64];
    std::snprintf(static_cast<char*>(out), 64, "Faield to configure camera: Unkown error (%i)", code);
    throw out;
    }
    }

    this->camera->requestCompleted.connect(this, &CameraManager::requestCompleted);

    Log::info("Camera has been configured");

    Log::info("Setting up buffers");
    this->fba = new libcamera::FrameBufferAllocator(this->camera);
    this->onViewFrame = onFrame;

    // Allocate frame buffers for the stream
    auto cfg = this->cfgs->at(0);
    auto stream = cfg.stream();
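    // The driver-reported stride can be wider than width * bytes-per-pixel because of row alignment; it is handed to the frame callback unchanged.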
    this->viewfinderStride = stream->configuration().stride;
    int bufferCount = this->fba->allocate(stream);
    if (bufferCount < 0) {
    switch (bufferCount) {
    case -EACCES:
    Log::err("Failed to allocate buffers: The camera is not in a state where a buffer can be allocated");
    break;
    case -EINVAL:
    Log::err("Failed to allocate buffers: The stream does not belong to the camera or the stream is not part of the active camera configuration");
    break;
    case -EBUSY:
    Log::err("Failed to allocate buffers: Buffers are already allocated for the stream");
    break;
    default:
    Log::err("Failed to allocate buffers: Unknown error");
    break;
    }
    throw bufferCount;
    }
    Log::info("Allocated " + std::to_string(bufferCount) + " buffers");

    auto& buffers = this->fba->buffers(stream);

    // Map frame buffers to memory
    this->memoryMappedFrameBuffers.clear();
    //for (const std::unique_ptr<libcamera::FrameBuffer> buf: buffers) {
    for (int i = 0; i < buffers.size(); i++) {
    // try {
    auto memMapped = MemoryMappedFrameBuffer(buffers[i].get());
    this->memoryMappedFrameBuffers.push_back(memMapped);
    // } catch(...) {
    // auto eptr = std::current_exception();
    // Log::err(std::string("An error occured while memory mapping a frame buffer: ") + std::string(eptr_to_string(eptr)));
    // return -1;
    // }
    }
    Log::info("Mapped frame buffers to memory");

    // Create requests
    this->reqs.clear();
    for (int i = 0; i < this->memoryMappedFrameBuffers.size(); i++) {
    MemoryMappedFrameBuffer& buf = this->memoryMappedFrameBuffers[i];
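    // The buffer index is passed as the request cookie; requestCompleted() does not currently read it.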
    auto req = this->camera->createRequest(i);
    if (req->addBuffer(stream, (libcamera::FrameBuffer*) buf.fb)) {
    throw("Couldn't add buffer to request");
    }
    this->reqs.push_back(std::move(req));
    }

    Log::info("Buffers and requests created");
    }

    // Returns 0 on success
    int startVideoStream() {
    Log::info("Starting video stream");
    this->camera->start();
    this->cancelRequests = false;
    this->previewStarted = true;

    // TODO: queue requests
    for (auto& req: this->reqs) {
    this->camera->queueRequest(req.get());
    }

    this->previewStarted = true;

    return 0;
    }

    void stopVideoStream() {
    Log::info("Stopping video stream");

    this->previewStarted = false;
    this->cancelRequests = true; // TODO: cancel requests here?
    this->camera->stop();

    // TODO: requests -> destroy?
    }

    ~CameraManager() {
    this->camera->release();

    // Free memory
    delete this->fba;
    this->camera.reset();
    this->mgr.reset();
    }

    std::string const& cameraId() {
    return this->camera->id();
    }

    bool cameraStarted() {
    return this->previewStarted;
    }

    private:
    void requestCompleted(libcamera::Request* request) {
    if (request->status() == libcamera::Request::RequestCancelled) {
    if (this->cameraStarted())
    Log::warn("Hardware timeout; TODO: handle");
    }

    auto cfg = this->cfgs->at(0);
    auto stream = cfg.stream();
    auto scfg = stream->configuration();
    Log::info("Stream: \n- pixelFormat: " + std::to_string(scfg.pixelFormat.fourcc()) + "(" + scfg.pixelFormat.toString() + ")\n" +
    "- size: " + std::to_string(scfg.size.width) + ", " + std::to_string(scfg.size.height) + "\n" +
    "- stride: " + std::to_string(scfg.stride) + "\n" +
    "- frameSize: " + std::to_string(scfg.frameSize) + "\n" +
    "- bufferCount: " + std::to_string(scfg.bufferCount) + "\n" +
    "- colorSpace: " + (scfg.colorSpace.has_value() ? (*cfg.colorSpace).toString() : "none")
    );

    std::cout << "Request metadata:" << std::endl;
    const libcamera::ControlList &requestMetadata = request->metadata();
    for (const auto &ctrl : requestMetadata) {
    const libcamera::ControlId *id = libcamera::controls::controls.at(ctrl.first);
    const libcamera::ControlValue &value = ctrl.second;

    std::cout << "\t" << id->name() << " = " << value.toString() << std::endl;
    }

    // Get the buffer associated to this stream
    auto buffer = request->findBuffer(stream);
    if (auto memBuf = std::find_if(
    this->memoryMappedFrameBuffers.begin(),
    this->memoryMappedFrameBuffers.end(),
    [&buffer](const MemoryMappedFrameBuffer& memMappedFB) {
    return memMappedFB.fb == buffer;
    }
    ); memBuf != std::end(this->memoryMappedFrameBuffers)) {
    if (!this->cancelRequests) {
    //const std::vector<libcamera::Plane>& planes = memBuf.fb->planes();
    //assert(planes.size() == 1); // YUYV and ARGB have size of 1
    // First plane memory map
    auto elem = memBuf->mmaps.begin();
    void* memPtr = std::get<0>(elem->second);
    size_t memSize = std::get<1>(elem->second);
    this->onViewFrame(memPtr, this->viewfinderStride, memSize);
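    // Reuse the request with the same buffers and queue it again so the stream keeps delivering frames.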
    request->reuse(libcamera::Request::ReuseFlag::ReuseBuffers);
    this->camera->queueRequest(request);
    } else {
    request->reuse(libcamera::Request::ReuseFlag::ReuseBuffers);
    }
    } else {
    Log::warn("Buffer with frame buffer pointer" + std::to_string(reinterpret_cast<intptr_t>(buffer)) + " not found");
    }
    }
    };

    #include <libyuv/convert_argb.h>
    #include <stdio.h>
    static void* screen_buffer;
    void onFrame(void* frame, int stride, size_t size) {
    char out[128];
    snprintf(out, 128, "On frame %p, %i, %zu", frame, stride, size);
    Log::info(out);
    memcpy(screen_buffer, frame, stride * screen_height); //screen_width * screen_height * 2);
    // FILE* f = fopen("test", "wb");
    // //std::cout << stride;
    // std::cout << screen_width * screen_height * 2 << std::endl;
    // if (int byteCount = fwrite(frame, 1, screen_width * screen_height * 2, f); byteCount != screen_width * screen_height * 2) {
    // std::cerr << "Error writing file; " << byteCount << " bytes written (" << strerror(errno) << ")" << std::endl;
    // };
    // fclose(f);
    // exit(1);
    return;
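    // NOTE: the early return above makes the YUYV -> ARGB conversion below unreachable; the memcpy above writes the raw frame bytes to the framebuffer instead.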
    libyuv::YUY2ToARGB(
    (const uint8_t*) frame,
    stride,
    (uint8_t*) screen_buffer,
    screen_width * screen_bytes,
    screen_width,
    screen_height
    );
    }

    #include <linux/fb.h>
    #include <linux/videodev2.h>
    #include <sys/ioctl.h>
    #include <fcntl.h>
    #include <cassert> // needed for the assert() checks on the framebuffer configuration below

    // Will preview for 5 seconds to the linux frame buffer
    int main(void) {
    // auto mgr = libcamera::CameraManager();
    // mgr.start();
    // std::cout << mgr.version() << std::endl;
    // auto cameras = mgr.cameras();
    // std::cout << "Found " << cameras.size() << " cameras!" << std::endl;
    // if (cameras.size() == 0) {
    // throw "";
    // }
    int fbfd = open("/dev/fb0", O_RDWR);
    if (fbfd < 0) {
    std::cout << "Couldn't open framebuffer: " << strerror(errno) << std::endl;
    exit(1);
    }

    struct fb_var_screeninfo vinfo;
    if (ioctl(fbfd, FBIOGET_VSCREENINFO, &vinfo)) {
    std::cerr << "Error reading variable information" << std::endl;
    }

    vinfo.yoffset = 0;
    vinfo.activate |= FB_ACTIVATE_FORCE;

    std::cerr << "Selecting RGB32 color space" << std::endl;
    vinfo.bits_per_pixel = 32;
    vinfo.nonstd = 0;
    vinfo.colorspace = V4L2_PIX_FMT_RGB32;
    // vinfo.xres = 1024;
    // vinfo.yres = 600;

    if (ioctl(fbfd, FBIOPUT_VSCREENINFO, &vinfo)) {
    std::cerr << "Error setting variable information: " << strerror(errno) << std::endl;
    exit(1);
    }

    assert(vinfo.xres <= vinfo.xres_virtual);
    assert(vinfo.bits_per_pixel == 32);
    assert(vinfo.colorspace == V4L2_PIX_FMT_RGB32);

    int w = vinfo.xres;
    int h = vinfo.yres;
    screen_width = w;
    screen_height = h;
    Log::info("W = " + std::to_string(screen_width));
    Log::info("H = " + std::to_string(screen_height));
    sleep(1);
    int bpp = vinfo.bits_per_pixel;
    int bytes = bpp / 8;
    screen_bytes = bytes;
    Log::info("w = " + std::to_string(w));
    Log::info("w = " + std::to_string(h));

    int colorspace = vinfo.colorspace;
    Log::info("Colorspace: " + std::to_string(colorspace));
    Log::info("RGBA: " + std::to_string(PF_RGBA));

    int fb_data_size = w * h * bytes;
    void* fbdata = mmap(0, fb_data_size, PROT_READ | PROT_WRITE, MAP_SHARED, fbfd, (off_t)0);
    screen_buffer = fbdata;
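    // memset only uses the lowest byte of the value (0xFF), so this fills the framebuffer with solid white rather than the literal 0xFFFF00FF color.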
    memset(fbdata, 0xFFFF00FF, fb_data_size);
    Log::info("Cleared screen");
    sleep(1);

    try {
    CameraManager mgr;
    event_loop(mgr);
    } catch (std::exception const& e) {
    Log::err(std::string("ERROR: *** ") + e.what() + " ***");
    return -1;
    //CameraManager mgr = CameraManager(0, 1024, 600, &onFrame);
    CameraManager mgr = CameraManager(0, w, h, &onFrame);
    Log::info("Mgr colorspace: " + std::to_string(mgr.pixelFormat));
    Log::info("Colorspace: " + std::to_string(colorspace));
    Log::info("Bytes: " + std::to_string(bytes));
    Log::info("RGBA: " + std::to_string(PF_RGBA));
    Log::info("YUYV: " + std::to_string(PF_YUYV));
    if (mgr.startVideoStream()) {
    std::cerr << "An error occured" << std::endl;
    } else {
    std::cerr << "Video stream started successfully" << std::endl;
    }
    sleep(5);
    mgr.stopVideoStream();
    // Wait for last frames to be processed
    sleep(5);
    } catch (const char* exc) {
    std::cerr << exc << std::endl;
    } catch (std::string exc) {
    std::cerr << exc << std::endl;
    } catch (const std::exception& exc) {
    std::cerr << exc.what() << std::endl;
    }

    CLEAN:
    munmap(fbdata, fb_data_size);
    close(fbfd);

    return 0;
    }
  3. Jomy10 renamed this gist Jan 8, 2024. 1 changed file with 0 additions and 0 deletions.
    File renamed without changes.
  4. Jomy10 created this gist Jan 8, 2024.
    1 change: 1 addition & 0 deletions builld.sh
    @@ -0,0 +1 @@
    clang++ main.cpp $(pkg-config libcamera --libs --cflags) -std=gnu++20
    81 changes: 81 additions & 0 deletions main.cpp
    @@ -0,0 +1,81 @@
    #include <exception>
    #include <iostream>

    #include <libcamera/libcamera.h>
    #include <stdexcept>

    namespace Log {
    void log(std::string level, std::string msg) {
    std::cerr << "[" << level << "] " << msg << std::endl;
    }

    void info(std::string msg) {
    Log::log(std::string("INFO"), msg);
    }

    void warn(std::string msg) {
    Log::log(std::string("WARN"), msg);
    }

    void err(std::string msg) {
    Log::log(std::string("ERR"), msg);
    }

    void verbose(std::string msg) {
    Log::log(std::string("VERBOSE"), msg);
    }
    }

    class CameraManager {
    public:
    libcamera::CameraManager

    enum MsgType {
    Timeout,
    Quit,
    RequestComplete
    };

    void openCamera() {
    Log::info("Opening camera...");
    }
    };

    static void event_loop(CameraManager& mgr) {
    mgr.openCamera();
    mgr.configureViewFinder();
    mgr.startCamera();
    for (unsigned int count = 0; count < 1000; count++) {
    CameraManager::Msg msg = mgr.wait();
    if (msg.type == CameraManager::MsgType::Timeout) {
    Log::err("Device timeout detected, attempting to restart")
    mgr.stopCamera();
    mgr.startCamera();
    continue;
    }
    if (msg.type == CameraManager::MsgType::Quit) {
    return;
    } else if (msg.type != CameraManager::MsgType::RequestComplete) {
    throw std::runtime_error("unrecognized message!");
    }

    CompletedRequestPtr& completedRequest = std::get<CompletedRequestPtr>(msg.payload);

    if (mgr.viewfinderStream()) {
    Log::info("Viewfinder frame " + std::to_string(count));
    // AF?
    mgr.showPreview(completedRequest, mgr.viewfinderStream());
    }
    }
    }

    int main(void) {
    try {
    CameraManager mgr;
    event_loop(mgr);
    } catch (std::exception const& e) {
    Log::err(std::string("ERROR: *** ") + e.what() + " ***");
    return -1;
    }
    return 0;
    }