seedlin posted on 2019-3-5 17:50
The person who wrote the license plate recognition demo does run two models at the same time. Your init calls have to go through two objects; you can't just create a single rknn object and then init it tw ...
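(For illustration, a minimal sketch of the two-object pattern being described here, in C++; the buffer and length names are placeholders, not taken from the plate recognition demo:)

rknn_context ctx_det = 0;   // one context per model
rknn_context ctx_cls = 0;
// each context gets its own rknn_init() with its own model buffer
int ret = rknn_init(&ctx_det, det_model_buf, det_model_len, RKNN_FLAG_PRIOR_MEDIUM);
ret     = rknn_init(&ctx_cls, cls_model_buf, cls_model_len, RKNN_FLAG_PRIOR_MEDIUM);
// ... run inference on whichever context is needed ...
rknn_destroy(ctx_det);
rknn_destroy(ctx_cls);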
nevin007 posted on 2019-3-5 18:01
I'm using the C++ API; before init there only seems to be the step of loading the rknn model file.
elooon posted on 2019-3-5 19:54
Did you allocate only one rknn_context variable and then call init on that same variable twice?
yhc posted on 2019-3-6 10:25
Can the second model be initialized successfully on its own?
yhc posted on 2019-3-6 10:51
-6 is RKNN_ERR_MODEL_INVALID. Are you sure 17350 is really the size of the second model? Could the file have been read incorrectly ...
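(A quick way to rule out a bad read before blaming rknn_init is to load the file with a helper that checks the byte count. This is just a sketch using the same C headers as the full listing further down; the helper name is made up:)

static void *load_rknn_file(const char *path, int *out_len)
{
    FILE *fp = fopen(path, "rb");
    if (!fp) { printf("fopen %s fail!\n", path); return NULL; }
    fseek(fp, 0, SEEK_END);
    long len = ftell(fp);
    fseek(fp, 0, SEEK_SET);
    void *buf = malloc(len);
    if (fread(buf, 1, len, fp) != (size_t)len) {   // short read -> the data handed to rknn_init is invalid
        printf("fread %s fail!\n", path);
        free(buf); fclose(fp);
        return NULL;
    }
    fclose(fp);
    printf("%s: %ld bytes\n", path, len);          // should match the file size on disk (e.g. ls -l)
    *out_len = (int)len;
    return buf;
}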
nevin007 posted on 2019-3-6 11:39
Yes, I have run both models separately and they work fine... Surely it can't be that only one ctx can exist at a time...
If I comment out the upper half and initialize only the lower half, it works nor ...
protossw512 posted on 2019-3-7 11:40
Hi, when you run under C++, have you noticed that models run slower than under Python?
nevin007 posted on 2019-3-7 13:41
Inference speed is fine; the main problem is that init takes far too long.
protossw512 posted on 2019-3-7 16:36
I specifically tested both sides today and found that C++ inference is roughly twice as slow as Python. Not sure where the problem is ...
nevin007 posted on 2019-3-7 16:48
Hi, the C++ API version I'm using is 0.9.3. I tested the bundled mobilenet-ssd with the SDK's own test program: Python takes about 10 ms, C ...
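(When comparing the two APIs it helps to time only the rknn_run call itself, excluding image decoding, rknn_inputs_set and rknn_outputs_get; a rough sketch using gettimeofday, which the listing below already includes via <sys/time.h>:)

struct timeval t0, t1;
gettimeofday(&t0, NULL);
ret = rknn_run(ctx, nullptr);          // measure only the inference call
gettimeofday(&t1, NULL);
double ms = (t1.tv_sec - t0.tv_sec) * 1000.0 + (t1.tv_usec - t0.tv_usec) / 1000.0;
printf("rknn_run: %.2f ms\n", ms);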
protossw512 posted on 2019-3-8 05:24
Strange, running that rknn_ssd here takes 22 ms. Did you cross-compile on a PC and then run it on the 3399Pro, or did you build it dire ...
protossw512 posted on 2019-3-8 07:09
I finally found the cause: I had added the RKNN_FLAG_COLLECT_PERF_MASK flag at init time, which is why it was slower... However ...
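(For anyone else hitting this: RKNN_FLAG_COLLECT_PERF_MASK makes the runtime collect per-layer timing, which adds overhead to every run, so it is only worth setting when you actually want the detailed report. A sketch of the two init variants:)

// normal init: no per-layer profiling overhead
ret = rknn_init(&ctx, model, model_len, RKNN_FLAG_PRIOR_MEDIUM);

// profiling init: each run is slower, but RKNN_QUERY_PERF_DETAIL can then be queried
ret = rknn_init(&ctx, model, model_len,
                RKNN_FLAG_PRIOR_MEDIUM | RKNN_FLAG_COLLECT_PERF_MASK);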
nevin007 posted on 2019-3-8 09:21
So can you initialize multiple models at the same time and then run inference on them?
protossw512 posted on 2019-3-9 05:16
I haven't tried yet; my project doesn't need multiple models for now. How did you implement it? Did you init the same rknn_context several times, or use differ ...
nevin007 posted on 2019-3-11 09:30
Different contexts. I have tried it in Python and it works, but C++ does not...
#include <stdio.h>
#include <stdint.h>
#include <stdlib.h>
#include <fstream>
#include <iostream>
#include <algorithm>
#include <queue>
#include <sys/time.h>
#include "rknn_api.h"
#include "opencv2/core/core.hpp"
#include "opencv2/imgproc/imgproc.hpp"
#include "opencv2/highgui/highgui.hpp"

using namespace std;

template <class T>
void get_top_n(T* prediction, int prediction_size, size_t num_results,
               float threshold, std::vector<std::pair<float, int>>* top_results,
               bool input_floating) {
    // Will contain top N results in ascending order.
    std::priority_queue<std::pair<float, int>, std::vector<std::pair<float, int>>,
                        std::greater<std::pair<float, int>>>
        top_result_pq;
    const long count = prediction_size;  // NOLINT(runtime/int)
    for (int i = 0; i < count; ++i) {
        float value;
        if (input_floating)
            value = prediction[i];
        else
            value = prediction[i] / 255.0;
        // Only add it if it beats the threshold and has a chance at being in
        // the top N.
        if (value < threshold) {
            continue;
        }
        top_result_pq.push(std::pair<float, int>(value, i));
        // If at capacity, kick the smallest value out.
        if (top_result_pq.size() > num_results) {
            top_result_pq.pop();
        }
    }
    // Copy to output vector and reverse into descending order.
    while (!top_result_pq.empty()) {
        top_results->push_back(top_result_pq.top());
        top_result_pq.pop();
    }
    std::reverse(top_results->begin(), top_results->end());
}

int ReadLabelsFile(const string& file_name,
                   std::vector<string>* result,
                   size_t* found_label_count) {
    std::ifstream file(file_name);
    if (!file) {
        std::cerr << "Labels file " << file_name << " not found\n";
        return -1;
    }
    result->clear();
    string line;
    while (std::getline(file, line)) {
        result->push_back(line);
    }
    *found_label_count = result->size();
    const int padding = 16;
    while (result->size() % padding) {
        result->emplace_back();
    }
    return 0;
}

int main(int argc, char** argv)
{
    const char *img_path = "/home/toybrick/work/test/tmp/dog.jpg";
    const char *model_path = "/home/toybrick/work/test/tmp/mobilenet_v1-tf.rknn";
    const char *model_path2 = "/home/toybrick/work/test/tmp/mobilenet_ssd.rknn";
    const char *label_path = "/home/toybrick/work/test/tmp/labels.txt";
    const int output_elems = 1001;
    const int img_width = 224;
    const int img_height = 224;
    const int img_channels = 3;
    const int input_index = 0;   // node name "input"
    const int output_index = 0;  // node name "MobilenetV1/Predictions/Reshape_1"

    // Load image
    cv::Mat img = cv::imread(img_path, 1);
    if(!img.data) {
        printf("cv::imread %s fail!\n", img_path);
        return -1;
    }
    if(img.cols != img_width || img.rows != img_height)
        cv::resize(img, img, cv::Size(img_width, img_height), 0, 0, cv::INTER_LINEAR);
    // BGR->RGB
    cv::cvtColor(img, img, cv::COLOR_BGR2RGB);

    // Load first model
    FILE *fp = fopen(model_path, "rb");
    if(fp == NULL) {
        printf("fopen %s fail!\n", model_path);
        return -1;
    }
    fseek(fp, 0, SEEK_END);
    int model_len = ftell(fp);
    void *model = malloc(model_len);
    fseek(fp, 0, SEEK_SET);
    if(model_len != fread(model, 1, model_len, fp)) {
        printf("fread %s fail!\n", model_path);
        free(model);
        return -1;
    }

    // Start Inference
    rknn_input inputs[1];
    rknn_output outputs[1];
    rknn_tensor_attr output0_attr;
    int ret = 0;
    void *model2 = NULL;
    rknn_context ctx1 = 0;
    rknn_context ctx2 = 0;

    ret = rknn_init(&ctx1, model, model_len, RKNN_FLAG_PRIOR_MEDIUM);
    if(ret < 0) {
        printf("rknn_init fail! ret=%d\n", ret);
        goto Error;
    }
    printf("rknn_init 1 succeed! ret=%d\n", ret);
    fclose(fp);
    model_len = 0;

    // Load second model
    fp = fopen(model_path2, "rb");
    if(fp == NULL) {
        printf("fopen %s fail!\n", model_path2);
        return -1;
    }
    fseek(fp, 0, SEEK_END);
    model_len = ftell(fp);
    model2 = malloc(model_len);
    fseek(fp, 0, SEEK_SET);
    if(model_len != fread(model2, 1, model_len, fp)) {
        printf("fread %s fail!\n", model_path2);
        free(model2);
        return -1;
    }

    // Initialize the second context (this is the call that fails with -6 here)
    ret = rknn_init(&ctx2, model2, model_len, RKNN_FLAG_PRIOR_MEDIUM);
    if(ret < 0) {
        printf("rknn_init fail! ret=%d\n", ret);
        goto Error;
    }
    printf("rknn_init 2 succeed! ret=%d\n", ret);

    output0_attr.index = 0;
    ret = rknn_query(ctx1, RKNN_QUERY_OUTPUT_ATTR, &output0_attr, sizeof(output0_attr));
    if(ret < 0) {
        printf("rknn_query fail! ret=%d\n", ret);
        goto Error;
    }

    inputs[0].index = input_index;
    inputs[0].buf = img.data;
    inputs[0].size = img_width * img_height * img_channels;
    inputs[0].pass_through = false;
    inputs[0].type = RKNN_TENSOR_UINT8;
    inputs[0].fmt = RKNN_TENSOR_NHWC;
    ret = rknn_inputs_set(ctx1, 1, inputs);
    if(ret < 0) {
        printf("rknn_inputs_set fail! ret=%d\n", ret);
        goto Error;
    }

    ret = rknn_run(ctx1, nullptr);
    if(ret < 0) {
        printf("rknn_run fail! ret=%d\n", ret);
        goto Error;
    }

    outputs[0].want_float = true;
    outputs[0].is_prealloc = false;
    ret = rknn_outputs_get(ctx1, 1, outputs, nullptr);
    if(ret < 0) {
        printf("rknn_outputs_get fail! ret=%d\n", ret);
        goto Error;
    }

    // Process output
    if(outputs[0].size == output0_attr.n_elems * sizeof(float))
    {
        const size_t num_results = 5;
        const float threshold = 0.001f;
        std::vector<std::pair<float, int>> top_results;
        get_top_n<float>((float*)outputs[0].buf, output_elems,
                         num_results, threshold, &top_results, true);
        std::vector<string> labels;
        size_t label_count;
        if (!ReadLabelsFile(label_path, &labels, &label_count)) {
            for (const auto& result : top_results) {
                const float confidence = result.first;
                const int index = result.second;
                std::cout << confidence << ": " << index << " " << labels[index] << "\n";
            }
        }
    }
    else
    {
        printf("rknn_outputs_get fail! get output_size = [%u], but expect %u!\n",
               outputs[0].size, (uint32_t)(output0_attr.n_elems * sizeof(float)));
    }
    rknn_outputs_release(ctx1, 1, outputs);

Error:
    if(ctx1 > 0) rknn_destroy(ctx1);
    if(ctx2 > 0) rknn_destroy(ctx2);
    if(model) free(model);
    if(model2) free(model2);
    if(fp) fclose(fp);
    return 0;
}
elooon posted on 2019-3-11 16:29
outputs:
[toybrick@localhost build]$ ./rknn_mobilenet
hjf515 posted on 2019-3-11 19:23
Has the problem of loading multiple models been solved?
I also need to load multiple models, and if every load requires an init first, that would be far too slow. ...
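(If the worry is init time rather than memory, the usual approach is to pay rknn_init once per model at startup, keep the contexts alive, and only call rknn_inputs_set / rknn_run / rknn_outputs_get per frame. A rough sketch, assuming multi-context init actually succeeds on your driver, with placeholder names for the buffers and the frame loop:)

// startup: the slow part, done exactly once per model
rknn_context ctx_a = 0, ctx_b = 0;
rknn_init(&ctx_a, model_a_buf, model_a_len, RKNN_FLAG_PRIOR_MEDIUM);
rknn_init(&ctx_b, model_b_buf, model_b_len, RKNN_FLAG_PRIOR_MEDIUM);

// per frame: no re-init, just set inputs, run, fetch outputs
while (have_frames()) {
    rknn_inputs_set(ctx_a, 1, inputs_a);
    rknn_run(ctx_a, nullptr);
    rknn_outputs_get(ctx_a, 1, outputs_a, nullptr);
    // ... feed the result into the second model through ctx_b the same way ...
    rknn_outputs_release(ctx_a, 1, outputs_a);
}

// shutdown
rknn_destroy(ctx_a);
rknn_destroy(ctx_b);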
nevin007 posted on 2019-3-12 10:22
Thank you very much! I tested your demo and it does run. Building on it, I did a few further tests:
1. Taking model_path2 (ctx2 ...
elooon posted on 2019-3-12 16:06
emmm....
Can a PNet that was quantized (do_quantization=True) be initialized successfully on its own?