diff --git a/config.json b/config.json index a1d32d1..d411d4c 100644 --- a/config.json +++ b/config.json @@ -37,7 +37,7 @@ { "json-save": true, "json-save-directory": "local/scans", - "barcode": "1703543090000", + "barcode": "", "capture-device": "/dev/video0" }, "api": diff --git a/lib/sfw b/lib/sfw index 17adaed..569e203 160000 --- a/lib/sfw +++ b/lib/sfw @@ -1 +1 @@ -Subproject commit 17adaed169c40536cb9b25425c64712b1d0f74d1 +Subproject commit 569e203409993ea1d34bff910b80f72adb47b690 diff --git a/src/Pudding.cpp b/src/Pudding.cpp index 4b6ddc4..acd6709 100644 --- a/src/Pudding.cpp +++ b/src/Pudding.cpp @@ -47,8 +47,54 @@ Pudding::Pudding() log(message.str()); /* initialize a zbar image scanner for reading barcodes of any format */ image_scanner.set_config(zbar::ZBAR_NONE, zbar::ZBAR_CFG_ENABLE, 1); - /* use sdl context for now */ - load_sdl_context(); + /* use gl context so we can draw 3D pudding */ + load_gl_context(); +} + +void Pudding::load_gl_context() +{ + super::load_gl_context(); + /* Allocate a vertex array object, bind it as current, doesn't need to be a member var because the same one is always bound */ + GLuint vao; + glGenVertexArrays(1, &vao); + glBindVertexArray(vao); + /* 2D vertices for the video capture texture that are a single plane spanning the screen */ + std::array<glm::vec2, 6> camera_vertices = { + { + {-1.0f, 1.0f}, {1.0f, 1.0f}, {-1.0f, -1.0f}, + {1.0f, 1.0f}, {1.0f, -1.0f}, {-1.0f, -1.0f} + }}; + /* UV map for mapping video capture texture to video capture vertices */ + std::array<glm::vec2, 6> camera_uv = { + { + {0.0f, 1.0f}, {1.0f, 1.0f}, {0.0f, 0.0f}, + {1.0f, 1.0f}, {1.0f, 0.0f}, {0.0f, 0.0f} + }}; + /* generate one vertex buffer object to hold the camera texture and UV vertices */ + glGenBuffers(1, &vbo); + glBindBuffer(GL_ARRAY_BUFFER, vbo); + /* allocate space for vertices and UV, copy vertices in at initialization */ + GLsizeiptr vbo_size = (camera_vertices.size() + camera_uv.size()) * sizeof(glm::vec2); + glBufferData(GL_ARRAY_BUFFER, vbo_size, 
camera_vertices.data(), GL_STATIC_DRAW); + /* specify the location and data format of the vertex attributes as consecutive 2D float coords */ + glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 0, nullptr); + /* enable index 0 on currently bound VAO */ + glEnableVertexAttribArray(0); + /* copy UV data into the VBO, offset to after the vertex data */ + glBufferSubData(GL_ARRAY_BUFFER, camera_vertices.size() * sizeof(glm::vec2), camera_uv.size() * sizeof(glm::vec2), camera_uv.data()); + glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 0, reinterpret_cast<void*>(camera_vertices.size() * sizeof(glm::vec2))); + glEnableVertexAttribArray(1); + GLuint vertex_shader = load_shader("src/flat.vert", GL_VERTEX_SHADER); + GLuint fragment_shader = load_shader("src/flat.frag", GL_FRAGMENT_SHADER); + world_program = glCreateProgram(); + glAttachShader(world_program, vertex_shader); + glAttachShader(world_program, fragment_shader); + glBindAttribLocation(world_program, 0, "in_Position"); + glBindAttribLocation(world_program, 1, "vertexUV"); + link_shader(world_program); + /* generate the texture that will store the video frame */ + glGenTextures(1, &video_capture_texture_id); + log_gl_errors(); } /* Respond to command events */ @@ -97,7 +143,8 @@ void Pudding::add_item(const std::string& upc) incorporate_best_buy_api(item); } items.push_back(item); - increment_item_index(); + /* set item index to end so newest item will display */ + current_item_index = items.size() - 1; } /* Look for item upc in the Open Food API, and use the result to fill out item properties if found @@ -390,22 +437,37 @@ void Pudding::update() SDL_RenderCopyF(get_renderer(), get_current_item().get_active_image_texture().get(), nullptr, &item_box); video_box.set_left(get_window_box().get_center_x(), true); } - /* draw the camera to the right half of the screen if the camera has been opened */ + /* draw the camera if the camera has been opened, fullscreen if there aren't any items, or on the right otherwise */ if 
(capture.isOpened()) { capture.read(capture_frame); if (!capture_frame.empty()) { /* convert opencv matrix to sdl texture */ - SDL_Texture* texture = SDL_CreateTexture( - get_renderer(), SDL_PIXELFORMAT_BGR24, SDL_TEXTUREACCESS_STATIC, capture_frame.cols, capture_frame.rows); - SDL_UpdateTexture(texture, nullptr, static_cast<void*>(capture_frame.data), capture_frame.step1()); - SDL_RenderCopyF(get_renderer(), texture, nullptr, &video_box); - SDL_DestroyTexture(texture); - /* scan with zbar */ - cv::Mat gray; - cv::cvtColor(capture_frame, gray, cv::COLOR_BGR2GRAY); - zbar::Image query_image(gray.cols, gray.rows, "Y800", static_cast<void*>(gray.data), gray.cols * gray.rows); + // SDL_Texture* texture = SDL_CreateTexture( + // get_renderer(), SDL_PIXELFORMAT_BGR24, SDL_TEXTUREACCESS_STATIC, capture_frame.cols, capture_frame.rows); + // SDL_UpdateTexture(texture, nullptr, static_cast<void*>(capture_frame.data), capture_frame.step1()); + // SDL_RenderCopyF(get_renderer(), texture, nullptr, &video_box); + // SDL_DestroyTexture(texture); + glUseProgram(world_program); + /* rotate the opencv matrix 180 to work with opengl coords */ + cv::flip(capture_frame, capture_frame, -1); + /* bind texture to GLSL sampler */ + GLint base_texture_location = glGetUniformLocation(world_program, "baseTexture"); + glUniform1i(base_texture_location, 0); + glActiveTexture(GL_TEXTURE0); + glBindTexture(GL_TEXTURE_2D, video_capture_texture_id); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST); + glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST); + /* convert opencv matrix to GL texture */ + glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, capture_frame.cols, capture_frame.rows, 0, GL_BGR, GL_UNSIGNED_BYTE, capture_frame.ptr()); + glDrawArrays(GL_TRIANGLES, 0, 6); + SDL_GL_SwapWindow(get_window()); + log_gl_errors(); + /* convert to gray and scan with zbar */ + cv::cvtColor(capture_frame, capture_frame, cv::COLOR_BGR2GRAY); + zbar::Image query_image(capture_frame.cols, capture_frame.rows, 
"Y800", static_cast<void*>(capture_frame.data), + capture_frame.cols * capture_frame.rows); int result = image_scanner.scan(query_image); if (result > 0) { @@ -414,8 +476,8 @@ void Pudding::update() std::stringstream message; message << "camera scanned " << symbol->get_type_name() << " symbol " << symbol->get_data(); log(message.str()); - current_camera_barcode = symbol->get_data(); - current_barcode = current_camera_barcode; + // current_camera_barcode = symbol->get_data(); + // current_barcode = current_camera_barcode; } } query_image.set_data(nullptr, 0); diff --git a/src/Pudding.hpp b/src/Pudding.hpp index f721698..589deff 100644 --- a/src/Pudding.hpp +++ b/src/Pudding.hpp @@ -40,7 +40,9 @@ private: cv::VideoCapture capture; cv::Mat capture_frame; zbar::ImageScanner image_scanner; + GLuint vbo, world_program, video_capture_texture_id; + void load_gl_context(); void incorporate_open_food_api(Item&); void incorporate_nutronix_api(Item&); void incorporate_edamam_api(Item&); diff --git a/src/flat.frag b/src/flat.frag new file mode 100644 index 0000000..a48288c --- /dev/null +++ b/src/flat.frag @@ -0,0 +1,9 @@ +#version 130 + +in vec2 UV; +uniform sampler2D baseTexture; + +void main(void) +{ + gl_FragColor = texture(baseTexture, UV); +} diff --git a/src/flat.vert b/src/flat.vert new file mode 100644 index 0000000..7edfcb9 --- /dev/null +++ b/src/flat.vert @@ -0,0 +1,12 @@ +#version 130 + +in vec2 in_Position; +in vec2 vertexUV; + +out vec2 UV; + +void main(void) +{ + gl_Position = vec4(in_Position, 0, 1); + UV = vertexUV; +}