diff --git a/src/Item.cpp b/src/Item.cpp
index 042237d..691d8cf 100644
--- a/src/Item.cpp
+++ b/src/Item.cpp
@@ -22,17 +22,17 @@ void Item::set_text_property(const std::string& value, std::string& property, co
     }
 }
 
-void Item::add_image_texture(std::shared_ptr<SDL_Texture> texture)
+void Item::add_image_texture(std::shared_ptr<GLuint> texture_id)
 {
-    image_textures.push_back(texture);
+    image_textures.push_back(texture_id);
 }
 
-const std::vector<std::shared_ptr<SDL_Texture>>& Item::get_image_textures() const
+const std::vector<std::shared_ptr<GLuint>>& Item::get_image_textures() const
 {
     return image_textures;
 }
 
-const std::shared_ptr<SDL_Texture>& Item::get_active_image_texture() const
+const std::shared_ptr<GLuint>& Item::get_active_image_texture() const
 {
     return get_image_textures()[current_image_index];
 }
diff --git a/src/Item.hpp b/src/Item.hpp
index f3526d2..8284191 100644
--- a/src/Item.hpp
+++ b/src/Item.hpp
@@ -1,6 +1,17 @@
 #ifndef Item_h_
 #define Item_h_
 
+/* including so we can use GLuint type */
+#define GL_GLEXT_PROTOTYPES
+#define GLEW_STATIC
+#if defined(__EMSCRIPTEN__)
+#include
+#include
+#include
+#else
+#include "glew/glew.h"
+#endif
+
 #include
 #include
 #include
@@ -15,7 +26,7 @@ class Item : public Node
 
 private:
 
-    std::vector<std::shared_ptr<SDL_Texture>> image_textures;
+    std::vector<std::shared_ptr<GLuint>> image_textures;
     std::string brand_name = "", product_name = "", upc = "";
     int current_image_index = 0;
     void set_text_property(const std::string&, std::string&, const std::string&);
@@ -28,9 +39,9 @@
 public:
 
     Item(Node*);
-    void add_image_texture(std::shared_ptr<SDL_Texture>);
-    const std::vector<std::shared_ptr<SDL_Texture>>& get_image_textures() const;
-    const std::shared_ptr<SDL_Texture>& get_active_image_texture() const;
+    void add_image_texture(std::shared_ptr<GLuint>);
+    const std::vector<std::shared_ptr<GLuint>>& get_image_textures() const;
+    const std::shared_ptr<GLuint>& get_active_image_texture() const;
     void set_brand_name(const std::string&);
     const std::string& get_brand_name() const;
     void set_product_name(const std::string&);
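A note on the ownership change above: an Item now holds `std::shared_ptr<GLuint>` handles instead of `SDL_Texture*` pointers, so the GL texture object is only released when the last holder drops its reference. The patch pairs `new GLuint` with the static `Pudding::destroy_texture` deleter further down; the sketch below is an illustrative stand-alone version of the same pattern using a lambda deleter, not code from the patch.

```cpp
#include <memory>
// assumes a GL header providing GLuint, glGenTextures and glDeleteTextures,
// matching the GLEW/GLES includes added to Item.hpp above

/* hypothetical helper: wrap a freshly generated texture name so that both the
 * GL object and the heap-allocated id are released with the last shared_ptr copy */
std::shared_ptr<GLuint> make_shared_texture()
{
    std::shared_ptr<GLuint> texture_id(new GLuint, [](GLuint* id) {
        glDeleteTextures(1, id);   // free the GL texture object
        delete id;                 // free the integer holding the texture name
    });
    glGenTextures(1, texture_id.get());
    return texture_id;
}
```

Because the deleter travels with the pointer, any copy stored in `image_textures` keeps the texture alive without Item needing to know how it was created.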
diff --git a/src/Pudding.cpp b/src/Pudding.cpp
index 09d7186..74f3583 100644
--- a/src/Pudding.cpp
+++ b/src/Pudding.cpp
@@ -59,13 +59,13 @@ void Pudding::load_gl_context()
     glGenVertexArrays(1, &vao);
     glBindVertexArray(vao);
     /* 2D vertices for the video capture texture that are a single plane spanning the screen */
-    std::array<glm::vec2, 6> camera_vertices = {
+    std::array<glm::vec2, 6> rectangle_vertices = {
         {
             {-1.0f, 1.0f}, {1.0f, 1.0f}, {-1.0f, -1.0f},
             {1.0f, 1.0f}, {1.0f, -1.0f}, {-1.0f, -1.0f}
         }};
     /* UV map for mapping video capture texture to video capture vertices */
-    std::array<glm::vec2, 6> camera_uv = {
+    std::array<glm::vec2, 6> rectangle_uv = {
         {
             {0.0f, 1.0f}, {1.0f, 1.0f}, {0.0f, 0.0f},
             {1.0f, 1.0f}, {1.0f, 0.0f}, {0.0f, 0.0f}
@@ -74,15 +74,16 @@
     glGenBuffers(1, &vbo);
     glBindBuffer(GL_ARRAY_BUFFER, vbo);
     /* allocate space for vertices and UV, copy vertices in at initialization */
-    GLsizeiptr vbo_size = (camera_vertices.size() + camera_uv.size()) * sizeof(glm::vec2);
-    glBufferData(GL_ARRAY_BUFFER, vbo_size, camera_vertices.data(), GL_STATIC_DRAW);
+    GLsizeiptr vbo_size = (rectangle_vertices.size() + rectangle_uv.size()) * sizeof(glm::vec2);
+    glBufferData(GL_ARRAY_BUFFER, vbo_size, rectangle_vertices.data(), GL_STATIC_DRAW);
     /* specify the location and data format of the vertex attributes as consecutive 2D float coords */
     glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 0, nullptr);
     /* enable index 0 on currently bound VAO */
     glEnableVertexAttribArray(0);
     /* copy UV data into the VBO, offset to after the vertex data */
-    glBufferSubData(GL_ARRAY_BUFFER, camera_vertices.size() * sizeof(glm::vec2), camera_uv.size() * sizeof(glm::vec2), camera_uv.data());
-    glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 0, reinterpret_cast<void*>(camera_vertices.size() * sizeof(glm::vec2)));
+    glBufferSubData(GL_ARRAY_BUFFER, rectangle_vertices.size() * sizeof(glm::vec2),
+                    rectangle_uv.size() * sizeof(glm::vec2), rectangle_uv.data());
+    glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 0, reinterpret_cast<void*>(rectangle_vertices.size() * sizeof(glm::vec2)));
     glEnableVertexAttribArray(1);
     GLuint vertex_shader = load_shader("src/flat.vert", GL_VERTEX_SHADER);
     GLuint fragment_shader = load_shader("src/flat.frag", GL_FRAGMENT_SHADER);
@@ -98,6 +99,12 @@ void Pudding::load_gl_context()
     glTexStorage2D(GL_TEXTURE_2D, 1, GL_RGB8, capture.get(cv::CAP_PROP_FRAME_WIDTH), capture.get(cv::CAP_PROP_FRAME_HEIGHT));
     glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
     glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
+    /* just need to set these once since we're drawing one texture to each viewport */
+    GLint base_texture_location = glGetUniformLocation(world_program, "baseTexture");
+    glUniform1i(base_texture_location, 0);
+    glActiveTexture(GL_TEXTURE0);
+    /* tell GL to use our shader */
+    glUseProgram(world_program);
     log_gl_errors();
 }
 
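With the shader program, the `baseTexture` sampler, and texture unit 0 bound once at startup, every draw in `update()` reduces to choosing a viewport, binding a texture, and issuing the six-vertex rectangle. A condensed sketch of that per-draw sequence (hypothetical helper name; the patch performs these calls inline in `Pudding::update()`):

```cpp
/* assumes the VAO/VBO, program and sampler uniform set up in load_gl_context()
 * above are already bound */
void draw_textured_rectangle(GLuint texture_id, int x, int y, int w, int h)
{
    glViewport(x, y, w, h);                    // map clip space onto this region of the window
    glBindTexture(GL_TEXTURE_2D, texture_id);  // sampled through texture unit 0
    glDrawArrays(GL_TRIANGLES, 0, 6);          // two triangles spanning the viewport
}
```

Keeping the uniform and program state out of the per-frame path is what lets the later hunks delete the `glGetUniformLocation`/`glUniform1i`/`glUseProgram` calls from `update()`.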
"https://api.edamam.com/api/food-database/v2/parser?upc=" << item.get_upc() << "&app_id=" << get_configuration()["api"]["edamam-app-id"].get() << "&app_key=" << get_configuration()["api"]["edamam-app-key"].get(); nlohmann::json json = json_from_url(url.str()); - // test that should determine if a Edamam response has food data + /* test that should determine if a Edamam response has food data */ if (json.contains("hints") && json["hints"][0].contains("food")) { nlohmann::json food = json["hints"][0]["food"]; if (food.value("image", "") != "") { std::string url = food["image"]; - std::shared_ptr texture = texture_from_image_url(url); - if (texture != nullptr) + std::shared_ptr texture_id = texture_from_image_url(url); + if (texture_id != nullptr) { - item.add_image_texture(texture); + item.add_image_texture(texture_id); } item.set_product_name(food.value("label", "")); } @@ -248,25 +255,25 @@ void Pudding::incorporate_edamam_api(Item& item) void Pudding::incorporate_best_buy_api(Item& item) { log("checking Best Buy API"); - // build API url by concatenating relevant values into query string + /* build API url by concatenating relevant values into query string */ std::stringstream url; url << "https://api.bestbuy.com/v1/products(upc=" << item.get_upc() << ")?format=json&apiKey=" << get_configuration()["api"]["best-buy-api-key"].get(); nlohmann::json json = json_from_url(url.str()); - // test that should determine if a Best Buy response has a result + /* test that should determine if a Best Buy response has a result */ if (json.contains("total") && json["total"].get() > 0) { nlohmann::json product = json["products"][0]; - // look up image (for games this is box art) and "alternate views image" (for games this is a screen shot) + /* look up image (for games this is box art) and "alternate views image" (for games this is a screen shot) */ for (std::string key : {"alternateViewsImage", "image"}) { if (product.value(key, "") != "") { std::string url = product[key]; - std::shared_ptr texture = texture_from_image_url(url); - if (texture != nullptr) + std::shared_ptr texture_id = texture_from_image_url(url); + if (texture_id != nullptr) { - item.add_image_texture(texture); + item.add_image_texture(texture_id); } } } @@ -377,18 +384,36 @@ size_t Pudding::curl_write_response(std::uint8_t* buffer, size_t size, size_t co return total_size; } -/* Get an image at the submitted URL as a pointer to SDL_Texture memory +/* Allocate storage for a texture, copy the cURL response data into the storage, and return the ID that corresponds to the GL texture */ -std::shared_ptr Pudding::texture_from_image_url(const std::string& url) +std::shared_ptr Pudding::texture_from_image_url(const std::string& url) { log("looking up image at " + url); std::vector storage; curl_get_bytes(url, storage); if (!storage.empty()) { - SDL_RWops* rw = SDL_RWFromConstMem(storage.data(), storage.size()); debug("received image data"); - return std::shared_ptr(IMG_LoadTexture_RW(get_renderer(), rw, 0), Pudding::destroy_texture); + /* load as an SDL surface to translate image format into pixel data, flip, and get dimensions */ + SDL_RWops* rw = SDL_RWFromConstMem(storage.data(), storage.size()); + SDL_Surface* surface = IMG_Load_RW(rw, 0); + SDL_Surface* flipped_surface = rotozoomSurfaceXY(surface, 0, 1, -1, 0); + std::ostringstream message; + message << "image is " << flipped_surface->w << "x" << flipped_surface->h; + log(message.str()); + /* generate a GL texture, allocate space and set properties */ + std::shared_ptr texture_id = 
@@ -377,18 +384,36 @@ size_t Pudding::curl_write_response(std::uint8_t* buffer, size_t size, size_t co
     return total_size;
 }
 
-/* Get an image at the submitted URL as a pointer to SDL_Texture memory
+/* Allocate storage for a texture, copy the cURL response data into the storage, and return the ID that corresponds to the GL texture
  */
-std::shared_ptr<SDL_Texture> Pudding::texture_from_image_url(const std::string& url)
+std::shared_ptr<GLuint> Pudding::texture_from_image_url(const std::string& url)
 {
     log("looking up image at " + url);
     std::vector<std::uint8_t> storage;
     curl_get_bytes(url, storage);
     if (!storage.empty())
     {
-        SDL_RWops* rw = SDL_RWFromConstMem(storage.data(), storage.size());
         debug("received image data");
-        return std::shared_ptr<SDL_Texture>(IMG_LoadTexture_RW(get_renderer(), rw, 0), Pudding::destroy_texture);
+        /* load as an SDL surface to translate image format into pixel data, flip, and get dimensions */
+        SDL_RWops* rw = SDL_RWFromConstMem(storage.data(), storage.size());
+        SDL_Surface* surface = IMG_Load_RW(rw, 0);
+        SDL_Surface* flipped_surface = rotozoomSurfaceXY(surface, 0, 1, -1, 0);
+        std::ostringstream message;
+        message << "image is " << flipped_surface->w << "x" << flipped_surface->h;
+        log(message.str());
+        /* generate a GL texture, allocate space and set properties */
+        std::shared_ptr<GLuint> texture_id = std::shared_ptr<GLuint>(new GLuint, Pudding::destroy_texture);
+        glGenTextures(1, texture_id.get());
+        glBindTexture(GL_TEXTURE_2D, *texture_id);
+        glTexStorage2D(GL_TEXTURE_2D, 1, GL_RGB8, flipped_surface->w, flipped_surface->h);
+        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
+        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
+        /* copy image bytes into GL texture */
+        glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, surface->w, surface->h, GL_RGBA, GL_UNSIGNED_BYTE, flipped_surface->pixels);
+        log_gl_errors();
+        SDL_FreeSurface(surface);
+        SDL_FreeSurface(flipped_surface);
+        return texture_id;
     }
     else
     {
@@ -397,15 +422,15 @@ std::shared_ptr<SDL_Texture> Pudding::texture_from_image_url(const std::string&
     }
 }
 
-/* Call SDL's destroy texture function, and print a debug statement for testing. This is defined as a static member
- * function and uses the SDL function instead of the inherited logging functions from Node since the object may not
- * be allocated at destruction time (?)
+/* Call GL's delete texture function, and print a debug statement for testing. This is defined as a static member
+ * function and uses the SDL logging function instead of the inherited logging functions from Node since the object
+ * may not be allocated at destruction time (?)
  */
-void Pudding::destroy_texture(SDL_Texture* texture)
+void Pudding::destroy_texture(GLuint* texture_id)
 {
     /* not sure why SDL_Log works here but SDL_LogDebug and SDL_LogInfo don't */
-    SDL_Log("destroying texture %p", texture);
-    SDL_DestroyTexture(texture);
+    SDL_Log("destroying texture ID %i", *texture_id);
+    glDeleteTextures(1, texture_id);
 }
 
 /* Change the currently selected item */
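The new `texture_from_image_url()` decodes the downloaded bytes with SDL_image, flips the surface, and hands the pixels to a GL texture created with immutable storage. The generic upload step looks like the sketch below (illustrative helper, assuming the decoded surface is already 32-bit RGBA and a GL 4.2 / GLES 3 context where `glTexStorage2D` is available):

```cpp
#include <SDL_image.h>
// a GL header providing glTexStorage2D is assumed, as in load_gl_context()

GLuint upload_surface(SDL_Surface* surface)
{
    GLuint texture_id;
    glGenTextures(1, &texture_id);
    glBindTexture(GL_TEXTURE_2D, texture_id);
    glTexStorage2D(GL_TEXTURE_2D, 1, GL_RGBA8, surface->w, surface->h);   // one immutable mip level
    glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, surface->w, surface->h,
                    GL_RGBA, GL_UNSIGNED_BYTE, surface->pixels);          // copy the pixel data
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);    // no mipmaps, so no mipmap filtering
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
    return texture_id;
}
```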
"camera scanned " << symbol->get_type_name() << " symbol " << symbol->get_data(); log(message.str()); - // current_camera_barcode = symbol->get_data(); - // current_barcode = current_camera_barcode; + current_camera_barcode = symbol->get_data(); + current_barcode = current_camera_barcode; } } query_image.set_data(nullptr, 0); @@ -483,6 +507,7 @@ void Pudding::update() debug("video capture device frame empty"); } } + SDL_GL_SwapWindow(get_window()); /* add a new item if a new barcode was scanned or entered */ if (current_barcode != previous_barcode) { diff --git a/src/Pudding.hpp b/src/Pudding.hpp index 589deff..a78f2f9 100644 --- a/src/Pudding.hpp +++ b/src/Pudding.hpp @@ -51,8 +51,8 @@ private: nlohmann::json json_from_url(const std::string&, const std::vector& = {}); void curl_get_bytes(const std::string& url, std::vector&, const std::vector& = {}); static size_t curl_write_response(std::uint8_t*, size_t, size_t, std::vector*); - std::shared_ptr texture_from_image_url(const std::string&); - static void destroy_texture(SDL_Texture*); + std::shared_ptr texture_from_image_url(const std::string&); + static void destroy_texture(GLuint*); public: