@dkurt
Last active August 6, 2024 07:32

Revisions

  1. dkurt revised this gist Oct 8, 2020. No changes.
  2. dkurt revised this gist Oct 8, 2020. 1 changed file with 2 additions and 3 deletions.
    5 changes: 2 additions & 3 deletions main.cpp
    @@ -55,9 +55,8 @@ int main(int argc, char** argv) {
              launcher.get(prediction);
          }
          gettimeofday(&end, 0);
    -     std::cout << (end.tv_sec - start.tv_sec) * 1e+3 +
    -                  (end.tv_usec - start.tv_usec) * 1e-3 << std::endl;
    -
    +     std::cout << "FPS: " << num / ((end.tv_sec - start.tv_sec) +
    +                  (end.tv_usec - start.tv_usec) * 1e-6) << std::endl;
          dataThread.join();
          return 0;
      }
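    The FPS figure above is simply the number of retrieved predictions divided by the elapsed wall-clock time in seconds. For reference, a minimal sketch of the same measurement using std::chrono instead of POSIX gettimeofday (my substitution, not part of the gist; it also works on non-POSIX platforms):

        #include <chrono>
        #include <iostream>

        int main() {
            const int num = 100;  // number of predictions, as in the gist
            auto start = std::chrono::steady_clock::now();
            // ... retrieve `num` predictions here, as in the gist's main loop ...
            auto end = std::chrono::steady_clock::now();
            double seconds = std::chrono::duration<double>(end - start).count();
            std::cout << "FPS: " << num / seconds << std::endl;
            return 0;
        }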
  3. dkurt revised this gist Oct 8, 2020. 2 changed files with 137 additions and 2 deletions.
    14 changes: 13 additions & 1 deletion CMakeLists.txt
    @@ -1 +1,13 @@
    - 1
    + cmake_minimum_required(VERSION 3.4.3)
    +
    + project(sample CXX)
    +
    + find_package(InferenceEngine REQUIRED)
    +
    + add_executable(${CMAKE_PROJECT_NAME} main.cpp)
    + target_compile_features(${CMAKE_PROJECT_NAME} PRIVATE cxx_range_for)
    +
    + target_link_libraries(${CMAKE_PROJECT_NAME}
    +     ${InferenceEngine_LIBRARIES}
    +     pthread
    + )
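    Note: find_package(InferenceEngine REQUIRED) only succeeds if CMake can locate the package, which with an OpenVINO install is typically arranged by sourcing its setupvars.sh (it sets InferenceEngine_DIR) before running cmake. pthread is linked explicitly because main.cpp uses std::thread.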
    125 changes: 124 additions & 1 deletion main.cpp
    @@ -1 +1,124 @@
    - 2
    + #include <iostream>
    + #include <queue>
    + #include <mutex>
    + #include <thread>
    + #include <condition_variable>
    + #include <sys/time.h>
    + #include <inference_engine.hpp>
    +
    + using namespace InferenceEngine;
    +
    + class OpenVINOLauncher {
    + public:
    +     OpenVINOLauncher(const std::string& xmlPath, const std::string& device);
    +
    +     // Request data to be processed.
    +     void process(const std::vector<float>& data);
    +
    +     // Wait for a ready prediction. Order is preserved.
    +     void get(std::vector<float>& prediction);
    +
    +     unsigned int nireq;
    +
    + private:
    +     Core ie;
    +     std::string outputName;
    +     ExecutableNetwork execNet;
    +     std::queue<InferRequest::Ptr> requests;
    +     std::queue<InferRequest::Ptr> idleRequests;
    +
    +     std::queue<std::vector<float> > predictions;
    +     std::mutex predictionsMutex;
    +     std::condition_variable prediction_ready;
    +     std::thread worker;  // note: unused in this sample
    + };
    +
    + int main(int argc, char** argv) {
    +     const int num = 100;
    +     OpenVINOLauncher launcher(argv[1], argv[2]);  // argv[1]: model .xml, argv[2]: device
    +
    +     // This thread imitates an input data source.
    +     std::thread dataThread([&](){
    +         std::vector<float> inputData(1*3*224*224);
    +         for (int i = 0; i < num; ++i) {
    +             launcher.process(inputData);
    +             std::this_thread::sleep_for(std::chrono::milliseconds(1));
    +         }
    +     });
    +
    +     // Retrieve predictions.
    +     timeval start, end;
    +     gettimeofday(&start, 0);
    +     std::vector<float> prediction;
    +     // TODO: process last <nireq> predictions
    +     for (int i = 0; i < num - static_cast<int>(launcher.nireq); ++i) {
    +         launcher.get(prediction);
    +     }
    +     gettimeofday(&end, 0);
    +     std::cout << (end.tv_sec - start.tv_sec) * 1e+3 +
    +                  (end.tv_usec - start.tv_usec) * 1e-3 << std::endl;
    +
    +     dataThread.join();
    +     return 0;
    + }
    +
    + OpenVINOLauncher::OpenVINOLauncher(const std::string& xmlPath, const std::string& device) {
    +     CNNNetwork net = ie.ReadNetwork(xmlPath);
    +
    +     std::map<std::string, std::string> config;
    +     if (device.find("CPU") != std::string::npos) {
    +         config["CPU_THROUGHPUT_STREAMS"] = "CPU_THROUGHPUT_AUTO";
    +     }
    +     if (device.find("GPU") != std::string::npos) {
    +         config["GPU_THROUGHPUT_STREAMS"] = "GPU_THROUGHPUT_AUTO";
    +     }
    +     execNet = ie.LoadNetwork(net, device, config);
    +
    +     nireq = execNet.GetMetric(METRIC_KEY(OPTIMAL_NUMBER_OF_INFER_REQUESTS)).as<unsigned int>();
    +     outputName = net.getOutputsInfo().begin()->first;
    +
    +     std::cout << "Inference requests: " << nireq << std::endl;
    +     for (unsigned int i = 0; i < nireq; ++i) {
    +         idleRequests.push(execNet.CreateInferRequestPtr());
    +     }
    + }
    +
    + void OpenVINOLauncher::process(const std::vector<float>& input) {
    +     // Wait for the oldest inference request to finish if there are
    +     // no idle requests to accept new input data.
    +     if (idleRequests.empty())
    +         requests.front()->Wait(IInferRequest::RESULT_READY);
    +
    +     // Release finished requests from the front of the queue.
    +     while (!requests.empty()) {
    +         auto req = requests.front();
    +         if (req->Wait(IInferRequest::STATUS_ONLY) != StatusCode::OK)  // not ready yet
    +             break;
    +
    +         Blob::Ptr out = req->GetBlob(outputName);
    +         float* data = out->buffer();
    +
    +         std::unique_lock<std::mutex> lock(predictionsMutex);
    +         predictions.push(std::vector<float>(data, data + out->size()));
    +         prediction_ready.notify_one();
    +
    +         requests.pop();
    +         idleRequests.push(req);
    +     }
    +
    +     // Start a new request.
    +     auto req = idleRequests.front();
    +     float* data = const_cast<float*>(input.data());
    +     Blob::Ptr inputBlob = make_shared_blob<float>(TensorDesc(Precision::FP32, {1, 3, 224, 224}, Layout::NCHW), data);
    +     req->SetBlob("input_1", inputBlob);  // input name is model-specific
    +     req->StartAsync();
    +     idleRequests.pop();
    +     requests.push(req);
    + }
    +
    + void OpenVINOLauncher::get(std::vector<float>& prediction) {
    +     std::unique_lock<std::mutex> lock(predictionsMutex);
    +     prediction_ready.wait(lock, [&]{ return !predictions.empty(); });
    +     prediction = predictions.front();
    +     predictions.pop();
    + }
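    The TODO above leaves the last nireq predictions unread: main() stops after num - nireq calls to get(). One way to close that gap is to drain all in-flight requests once the data thread has finished. The flush() method below is a hypothetical addition of mine, not part of the gist; it assumes a matching void flush(); declaration in the class, and it must run after dataThread.join(), since the request queues are not guarded by a mutex:

        // Hypothetical addition (not in the gist): block until every in-flight
        // request finishes and publish its output, so that main() can then
        // read all `num` predictions instead of `num - nireq`.
        void OpenVINOLauncher::flush() {
            while (!requests.empty()) {
                auto req = requests.front();
                req->Wait(IInferRequest::RESULT_READY);  // block until finished

                Blob::Ptr out = req->GetBlob(outputName);
                float* data = out->buffer();

                std::unique_lock<std::mutex> lock(predictionsMutex);
                predictions.push(std::vector<float>(data, data + out->size()));
                prediction_ready.notify_one();
                lock.unlock();

                requests.pop();
                idleRequests.push(req);
            }
        }

    With this in place, main() would keep its existing loop, then call dataThread.join() followed by launcher.flush(), after which the remaining nireq predictions can be consumed with further get() calls.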
  4. dkurt created this gist Oct 8, 2020.
    1 change: 1 addition & 0 deletions CMakeLists.txt
    @@ -0,0 +1 @@
    + 1
    1 change: 1 addition & 0 deletions main.cpp
    @@ -0,0 +1 @@
    + 2