/* gunkiss/src/Pudding.cpp */
/*
_______________
//`````````````\\ + a game by @ohsqueezy (ohsqueezy.itch.io) & @sleepin (instagram.com/sleepin)
//~~~~~~~~~~~~~~~\\ + code is licensed for copy, modification and redistribution (git.nugget.fun/pudding)
//=================\\
/// \\\
///G-*U-*N-*K-*I-*S-*S\\\ 😀 Thank you for choosing Puddendo for your business 😀
//_______________________\\
```````````````````````````
Generate a custom pudding from food product UPC codes and help a pair of rats take over the video game industry, using
their extraterrestrial ability to turn trash into performance-enhancing drug puddings that enable business professionals
to predict the stock market with supernatural accuracy.
*/
#include "Pudding.hpp"
/* Launch the Pudding instance's mainloop */
int main()
{
Pudding pudding = Pudding();
pudding.run();
pudding.quit();
return 0;
}
/* Initialize a Pudding instance */
Pudding::Pudding()
{
/* subscribe to command events */
get_delegate().subscribe(&Pudding::respond, this);
/* initialize a zbar image scanner for reading barcodes of any format */
image_scanner.set_config(zbar::ZBAR_NONE, zbar::ZBAR_CFG_ENABLE, 1);
/* set up pudding model */
set_pudding_model(1.0f, 1.6f, 10, 12, -.6, .6, .25);
/* use gl context so we can draw 3D */
load_gl_context();
}
/* Assign vertices, colors and texture UV coordinates to the pudding model */
void Pudding::set_pudding_model(
float top_radius, float base_radius, int ring_vertex_count, int layer_count, float min_y, float max_y, float gradient_position)
{
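/* The model is a truncated cone built from stacked rings of ring_vertex_count vertices: layer_count + 1 rings are
* generated between max_y and min_y, the ring radius interpolates from top_radius to base_radius, and
* gradient_position selects the band where the color fades from brown to yellow. With the defaults passed by the
* constructor this comes to 12 * 10 * 6 = 720 side vertices plus 2 * 10 * 3 = 60 cap vertices. */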
size_t ii;
const glm::vec3 *layer_top_color, *layer_bottom_color;
const glm::vec2 *start_vertex, *end_vertex;
float layer_top_y, layer_top_percent, layer_base_y, layer_base_percent, u_step = 1.0f / ring_vertex_count, ring_start_vertex_u;
std::vector<glm::vec2> layer_top_ring, layer_base_ring;
layer_top_ring.reserve(ring_vertex_count);
layer_base_ring.reserve(ring_vertex_count);
/* y coordinates of each ring of vertices in the pudding */
const std::map<float, float> y_coords = sb::range_percent_count(max_y, min_y, layer_count + 1);
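/* note: judging from its use here, sb::range_percent_count appears to return a map keyed by percent, 0.0 at max_y
* through 1.0 at min_y, whose values are the interpolated y coordinates of the layer_count + 1 rings */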
/* loop through layers by looking at each layer's top and bottom rings simultaneously */
for (
auto layer_top_entry = y_coords.begin(), layer_base_entry = ++y_coords.begin();
layer_base_entry != y_coords.end();
layer_top_entry++, layer_base_entry++
)
{
layer_top_y = layer_top_entry->second;
layer_top_percent = layer_top_entry->first;
layer_base_y = layer_base_entry->second;
layer_base_percent = layer_base_entry->first;
layer_top_ring.clear();
layer_base_ring.clear();
sb::points_on_circle(layer_top_ring, ring_vertex_count, layer_top_percent * (base_radius - top_radius) + top_radius);
sb::points_on_circle(layer_base_ring, ring_vertex_count, layer_base_percent * (base_radius - top_radius) + top_radius);
/* layers above the gradient position are brown, layers below are yellow, and the layer that contains the gradient
* position is a gradient from brown to yellow */
if (layer_top_percent <= gradient_position && layer_base_percent > gradient_position)
{
layer_top_color = &PUDDING_BROWN;
layer_bottom_color = &PUDDING_YELLOW;
}
else if (layer_top_percent <= gradient_position)
{
layer_top_color = &PUDDING_BROWN;
layer_bottom_color = &PUDDING_BROWN;
}
else
{
layer_top_color = &PUDDING_YELLOW;
layer_bottom_color = &PUDDING_YELLOW;
}
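/* for example, with the constructor's defaults (12 layers, gradient_position .25) and assuming evenly spaced 1/12
* percent steps, the top three bands are solid brown, the fourth band (top percent .25, base percent ~.33) carries
* the brown-to-yellow gradient, and the remaining eight bands are solid yellow */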
/* u coordinate will increase toward 1.0f as we go around the ring */
ring_start_vertex_u = 0.0f;
for (ii = 0; ii < layer_top_ring.size(); ii++)
{
/* triangle that includes top two vertices and first base vertex */
start_vertex = &layer_top_ring[ii];
end_vertex = &layer_top_ring[(ii + 1) % layer_top_ring.size()];
pudding_vertices.push_back({start_vertex->x, layer_top_y, start_vertex->y});
pudding_uv.push_back({ring_start_vertex_u, layer_top_percent});
pudding_vertices.push_back({end_vertex->x, layer_top_y, end_vertex->y});
pudding_uv.push_back({ring_start_vertex_u + u_step, layer_top_percent});
pudding_colors.insert(pudding_colors.end(), 2, *layer_top_color);
pudding_vertices.push_back({layer_base_ring[ii].x, layer_base_y, layer_base_ring[ii].y});
pudding_uv.push_back({ring_start_vertex_u, layer_base_percent});
pudding_colors.push_back(*layer_bottom_color);
/* triangle that includes bottom two vertices and second top vertex */
start_vertex = &layer_base_ring[ii];
pudding_vertices.push_back({start_vertex->x, layer_base_y, start_vertex->y});
pudding_uv.push_back({ring_start_vertex_u, layer_base_percent});
pudding_colors.push_back(*layer_bottom_color);
pudding_vertices.push_back({end_vertex->x, layer_top_y, end_vertex->y});
pudding_uv.push_back({ring_start_vertex_u + u_step, layer_top_percent});
pudding_colors.push_back(*layer_top_color);
end_vertex = &layer_base_ring[(ii + 1) % layer_base_ring.size()];
pudding_vertices.push_back({end_vertex->x, layer_base_y, end_vertex->y});
pudding_uv.push_back({ring_start_vertex_u + u_step, layer_base_percent});
pudding_colors.push_back(*layer_bottom_color);
ring_start_vertex_u += u_step;
}
}
/* process the top and bottom of the pudding, filling each face with triangles that fan out from its center */
layer_top_ring.clear();
float y = max_y;
const glm::vec3* face_color = &PUDDING_BROWN;
Box texture_box = Box({0, 0}, {1, 1});
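/* the face loop body runs twice: the first pass builds the top face at max_y in brown with top_radius, then the
* reassignments at the end of the pass switch y and face_color so the second pass builds the base face at min_y in
* yellow with base_radius */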
for (float radius : {top_radius, base_radius})
{
/* reset the ring before generating this face's points, mirroring the clear-before-fill pattern used for the
* layer rings above (sb::points_on_circle appears to append to the vector it is given) */
layer_top_ring.clear();
sb::points_on_circle(layer_top_ring, ring_vertex_count, radius);
/* loop through points on the face */
for (ii = 0; ii < layer_top_ring.size(); ii++)
{
start_vertex = &layer_top_ring[ii];
end_vertex = &layer_top_ring[(ii + 1) % layer_top_ring.size()];
/* triangle from the center of the layer_top_ring to the edge */
pudding_vertices.push_back({start_vertex->x, y, start_vertex->y});
pudding_vertices.push_back({end_vertex->x, y, end_vertex->y});
pudding_vertices.push_back({0, y, 0});
/* map circle points to UV points of the texture */
pudding_uv.push_back(*start_vertex);
pudding_uv.push_back(*end_vertex);
pudding_uv.push_back({0, 0});
}
/* single color for the entire layer_top_ring */
pudding_colors.insert(pudding_colors.end(), 3 * layer_top_ring.size(), *face_color);
y = min_y;
face_color = &PUDDING_YELLOW;
}
}
/* Create GL context via super class and load vertices, UV data, and shaders */
void Pudding::load_gl_context()
{
super::load_gl_context();
/* create another GL context for loading camera frame textures */
SDL_GL_SetAttribute(SDL_GL_SHARE_WITH_CURRENT_CONTEXT, 1);
if ((capture_frame_thread_context = SDL_GL_CreateContext(window)) == nullptr)
{
log("could not create capture frame thread context");
}
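/* because SDL_GL_SHARE_WITH_CURRENT_CONTEXT was set before creating it, this context shares texture objects with
* the main context, which is what lets capture_frame() upload camera pixels from its worker thread */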
/* load background as surface, generate texture to load pixel data into, allocate storage, bind and edit texture properties */
std::unique_ptr<SDL_Surface, decltype(&SDL_FreeSurface)> surface(IMG_Load("local/tptile.jpg"), SDL_FreeSurface);
std::unique_ptr<SDL_Surface, decltype(&SDL_FreeSurface)> flipped_surface(rotozoomSurfaceXY(surface.get(), 0, 1, -1, 0), SDL_FreeSurface);
glGenTextures(1, &background_texture_id);
glBindTexture(GL_TEXTURE_2D, background_texture_id);
glTexStorage2D(GL_TEXTURE_2D, 1, GL_RGB8, flipped_surface->w, flipped_surface->h);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, flipped_surface->w, flipped_surface->h, GL_RGBA, GL_UNSIGNED_BYTE, flipped_surface->pixels);
std::ostringstream message;
message << "loaded background image " << flipped_surface->w << "x" << flipped_surface->h;
log(message.str());
/* Allocate a vertex array object and bind it as current; it doesn't need to be a member variable because the same one stays bound */
GLuint vao;
glGenVertexArrays(1, &vao);
glBindVertexArray(vao);
/* 2D vertices for any texture that is a plane spanning the screen */
std::array<glm::vec2, 6> rectangle_vertices = {{
{-1.0f, 1.0f}, {1.0f, 1.0f}, {-1.0f, -1.0f},
{1.0f, 1.0f}, {1.0f, -1.0f}, {-1.0f, -1.0f}
}};
/* UV map for mapping a texture onto a plane */
std::array<glm::vec2, 6> rectangle_uv = {{
{0.0f, 1.0f}, {1.0f, 1.0f}, {0.0f, 0.0f},
{1.0f, 1.0f}, {1.0f, 0.0f}, {0.0f, 0.0f}
}};
/* Generate one vertex buffer object to hold all vertices and rectangle UV map. Since we're using one buffer, data
* will be copied in one after the other, offset to after the previous data location. The same buffer offset will
* be passed to the vertex attributes for each data. */
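/* The resulting buffer layout, with the vertex attribute index that reads each region:
* [0] rectangle vertices (vec2) | [1] rectangle UV (vec2) | [2] pudding vertices (vec3) |
* [3] pudding colors (vec3) | [4] pudding UV (vec2)
* each region begins at the byte offset accumulated from the regions before it */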
GLuint vbo;
glGenBuffers(1, &vbo);
glBindBuffer(GL_ARRAY_BUFFER, vbo);
/* allocate space for vertices, UV and colors, and copy rectangle vertices in at initialization */
GLsizeiptr vbo_size = (rectangle_vertices.size() + rectangle_uv.size() + pudding_uv.size()) * sizeof(glm::vec2) +
(pudding_vertices.size() + pudding_colors.size()) * sizeof(glm::vec3);
glBufferData(GL_ARRAY_BUFFER, vbo_size, rectangle_vertices.data(), GL_STATIC_DRAW);
/* specify the rectangle vertex attributes as consecutive 2D float coords */
glVertexAttribPointer(0, 2, GL_FLOAT, GL_FALSE, 0, nullptr);
/* copy rectangle UV data into the VBO, offset to after the vertex data, set up attributes */
GLintptr offset = rectangle_vertices.size() * sizeof(glm::vec2);
glBufferSubData(GL_ARRAY_BUFFER, offset, rectangle_uv.size() * sizeof(glm::vec2), rectangle_uv.data());
glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 0, reinterpret_cast<GLvoid*>(offset));
/* copy pudding vertices into VBO, offset to after the rectangle UV, and set up vertex attributes for 3D */
offset += rectangle_uv.size() * sizeof(glm::vec2);
glBufferSubData(GL_ARRAY_BUFFER, offset, pudding_vertices.size() * sizeof(glm::vec3), pudding_vertices.data());
glVertexAttribPointer(2, 3, GL_FLOAT, GL_FALSE, 0, reinterpret_cast<GLvoid*>(offset));
/* copy pudding color values into VBO, offset to after pudding vertices and set as 3D */
offset += pudding_vertices.size() * sizeof(glm::vec3);
glBufferSubData(GL_ARRAY_BUFFER, offset, pudding_colors.size() * sizeof(glm::vec3), pudding_colors.data());
glVertexAttribPointer(3, 3, GL_FLOAT, GL_FALSE, 0, reinterpret_cast<GLvoid*>(offset));
/* copy pudding UV values into VBO, offset to after pudding color vertices and set as 2D */
offset += pudding_colors.size() * sizeof(glm::vec3);
glBufferSubData(GL_ARRAY_BUFFER, offset, pudding_uv.size() * sizeof(glm::vec2), pudding_uv.data());
glVertexAttribPointer(4, 2, GL_FLOAT, GL_FALSE, 0, reinterpret_cast<GLvoid*>(offset));
/* Load two shader programs, one for rendering the flat objects, and one for rendering the 3D model. Load, configure,
* and link the flat shader program first. */
GLuint vertex_shader = load_shader("src/flat.vert", GL_VERTEX_SHADER);
GLuint fragment_shader = load_shader("src/flat.frag", GL_FRAGMENT_SHADER);
flat_program = glCreateProgram();
glAttachShader(flat_program, vertex_shader);
glAttachShader(flat_program, fragment_shader);
glBindAttribLocation(flat_program, 0, "in_position");
glBindAttribLocation(flat_program, 1, "vertex_uv");
link_shader(flat_program);
/* load, configure and link the 3D world program */
vertex_shader = load_shader("src/mvp.vert", GL_VERTEX_SHADER);
fragment_shader = load_shader("src/mvp.frag", GL_FRAGMENT_SHADER);
mvp_program = glCreateProgram();
glAttachShader(mvp_program, vertex_shader);
glAttachShader(mvp_program, fragment_shader);
glBindAttribLocation(mvp_program, 2, "in_position");
glBindAttribLocation(mvp_program, 3, "in_color");
glBindAttribLocation(mvp_program, 4, "vertex_uv");
link_shader(mvp_program);
mvp_uniform_location = glGetUniformLocation(mvp_program, "mvp");
time_uniform_location = glGetUniformLocation(mvp_program, "time");
effect_uniform_location = glGetUniformLocation(mvp_program, "effect");
log_gl_errors();
}
/* Try to create cv::VideoCapture object using device ID #0. If successful, this will create GL texture IDs and storage
* for the camera frames, so it must be called after GL context has been created. Two textures will be created, so they
* can be used as a double buffer.
*/
void Pudding::initialize_camera()
{
/* initialize an opencv capture device for getting images from an attached camera */
int device_id = 0;
capture.open(device_id);
std::stringstream message;
if (capture.isOpened())
{
message << "opened and initialized " << capture.get(cv::CAP_PROP_FRAME_WIDTH) << "x" <<
capture.get(cv::CAP_PROP_FRAME_HEIGHT) << ", " << capture.get(cv::CAP_PROP_FPS) <<
"fps video capture device ID #" << device_id << " using " << capture.getBackendName();
/* generate two textures that will store the video frames with the intention of double buffering them
* for threaded texture loading */
for (GLuint* buffer_id : {&capture_texture_front_buffer_id, &capture_texture_back_buffer_id})
{
glGenTextures(1, buffer_id);
glBindTexture(GL_TEXTURE_2D, *buffer_id);
glTexStorage2D(GL_TEXTURE_2D, 1, GL_RGB8, capture.get(cv::CAP_PROP_FRAME_WIDTH), capture.get(cv::CAP_PROP_FRAME_HEIGHT));
// glTexStorage2D(GL_TEXTURE_2D, 1, GL_RGB8, 320, 240);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
}
capture_texture_id = capture_texture_front_buffer_id;
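/* capture_texture_id always names the texture that is safe to render from; capture_frame() uploads the next
* camera frame into the other texture and swaps this ID only after the upload has finished */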
}
else
{
message << "failed to open video capture device ID #" << device_id;
}
log(message.str());
}
/* Respond to command events */
void Pudding::respond(SDL_Event& event)
{
if (get_delegate().compare(event, "up"))
{
increment_item_index();
}
else if (get_delegate().compare(event, "right"))
{
get_current_item().increment_image_index();
}
else if (get_delegate().compare(event, "down"))
{
increment_item_index(-1);
}
else if (get_delegate().compare(event, "left"))
{
get_current_item().increment_image_index(-1);
}
else if (get_delegate().compare(event, "toggle-camera"))
{
if (capture.isOpened())
{
capture.release();
}
else
{
initialize_camera();
}
}
else if (get_delegate().compare(event, "toggle-item"))
{
show_item = !show_item;
}
/* The effect command switches the active effect to the next in the list, wrapping around at the end */
else if (get_delegate().compare(event, "effect"))
{
effect_id = (effect_id + 1) % Effect::COUNT;
glUseProgram(mvp_program);
glUniform1i(effect_uniform_location, effect_id);
}
}
/* Build an Item object by submitting the upc parameter to multiple APIs and taking
* relevant results from each. Result JSON will be saved if saving is enabled in the global
* configuration
*/
void Pudding::add_item(const std::string& upc)
{
Item item(this);
item.set_upc(upc);
if (get_configuration()["api"]["open-food-enabled"])
{
incorporate_open_food_api(item);
}
if (get_configuration()["api"]["nutronix-enabled"])
{
incorporate_nutronix_api(item);
}
if (get_configuration()["api"]["edamam-enabled"])
{
incorporate_edamam_api(item);
}
if (get_configuration()["api"]["best-buy-enabled"])
{
incorporate_best_buy_api(item);
}
if (item.get_image_textures().size() > 0)
{
items.push_back(item);
/* set item index to end so newest item will display */
current_item_index = items.size() - 1;
}
else
{
std::ostringstream message;
message << "discarding item, no images found for " << upc;
log(message.str());
}
}
/* Look for item upc in the Open Food API, and use the result to fill out item properties if found
*/
void Pudding::incorporate_open_food_api(Item& item)
{
log("checking Open Food API");
nlohmann::json json = json_from_url(OPEN_FOOD_API_URL + item.get_upc());
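/* judging from the fields read below, a non-empty response looks roughly like
* {"status": 1, "product": {"image_url": "...", "brands": "...", "product_name": "..."}} */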
/* test that should determine if an Open Food API response is not empty */
if (json.value("status", 0) && json.contains("product"))
{
if (json["product"].value("image_url", "") != "")
{
std::string url = json["product"]["image_url"];
std::shared_ptr<GLuint> texture_id = texture_from_image_url(url);
if (texture_id != nullptr)
{
item.add_image_texture(texture_id);
}
}
item.set_brand_name(json["product"].value("brands", ""));
item.set_product_name(json["product"].value("product_name", ""));
save_item_json(json, item, "Open_Food_API");
}
else
{
log("no results from Open Food");
}
}
/* Look for item upc in the Nutronix API, and use the result to fill out item properties if found
*/
void Pudding::incorporate_nutronix_api(Item& item)
{
log("checking Nutronix API");
/* Nutronix requires API keys in headers for validation */
nlohmann::json json = json_from_url(
NUTRONIX_API_URL + item.get_upc(), {
"x-app-id: " + get_configuration()["api"]["nutronix-app-id"].get<std::string>(),
"x-app-key: " + get_configuration()["api"]["nutronix-app-key"].get<std::string>()
});
/* test that should determine if a Nutronix response is not empty */
if (!(json.contains("message") && json["message"] == NUTRONIX_NOT_FOUND))
{
nlohmann::json food = json["foods"][0];
if (food.contains("photo") && food["photo"].value("thumb", "") != "")
{
std::string url = food["photo"]["thumb"];
log("adding image listed in Nutronix API at " + url);
std::shared_ptr<GLuint> texture_id = texture_from_image_url(url);
if (texture_id != nullptr)
{
item.add_image_texture(texture_id);
}
}
item.set_brand_name(food.value("brand_name", ""));
item.set_product_name(food.value("food_name", ""));
save_item_json(json, item, "Nutronix_API");
}
else
{
log("no results from Nutronix");
}
}
/* Submit a query to Edamam API and insert relevant results into supplied Item object
*/
void Pudding::incorporate_edamam_api(Item& item)
{
log("checking Edamam API");
/* build API url by concatenating relevant values into query string */
std::stringstream url;
url << "https://api.edamam.com/api/food-database/v2/parser?upc=" << item.get_upc() << "&app_id=" <<
get_configuration()["api"]["edamam-app-id"].get<std::string>() << "&app_key=" <<
get_configuration()["api"]["edamam-app-key"].get<std::string>();
nlohmann::json json = json_from_url(url.str());
/* test that should determine if an Edamam response has food data */
if (json.contains("hints") && json["hints"][0].contains("food"))
{
nlohmann::json food = json["hints"][0]["food"];
if (food.value("image", "") != "")
{
std::string url = food["image"];
std::shared_ptr<GLuint> texture_id = texture_from_image_url(url);
if (texture_id != nullptr)
{
item.add_image_texture(texture_id);
}
item.set_product_name(food.value("label", ""));
}
save_item_json(json, item, "Edamam_API");
}
else
{
log("no results from Edamam");
}
}
/* Submit a query to the Best Buy API and insert relevant results into supplied Item object
*/
void Pudding::incorporate_best_buy_api(Item& item)
{
log("checking Best Buy API");
/* build API url by concatenating relevant values into query string */
std::stringstream url;
url << "https://api.bestbuy.com/v1/products(upc=" << item.get_upc() << ")?format=json&apiKey=" <<
get_configuration()["api"]["best-buy-api-key"].get<std::string>();
nlohmann::json json = json_from_url(url.str());
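/* judging from the fields read below, a response with a hit looks roughly like
* {"total": 1, "products": [{"name": "...", "image": "...", "alternateViewsImage": "..."}]} */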
/* test that should determine if a Best Buy response has a result */
if (json.contains("total") && json["total"].get<int>() > 0)
{
nlohmann::json product = json["products"][0];
/* look up image (for games this is box art) and "alternate views image" (for games this is a screen shot) */
for (std::string key : {"alternateViewsImage", "image"})
{
if (product.value(key, "") != "")
{
std::string url = product[key];
std::shared_ptr<GLuint> texture_id = texture_from_image_url(url);
if (texture_id != nullptr)
{
item.add_image_texture(texture_id);
}
}
}
item.set_product_name(product.value("name", ""));
save_item_json(json, item, "Best_Buy_API");
}
else
{
log("no results from Best Buy");
}
}
/* Write submitted JSON to file, creating parent directories if necessary, and using item and
* api_name to determine file name prefix
*/
void Pudding::save_item_json(const nlohmann::json& json, const Item& item, const std::string& api_name) const
{
if (get_configuration()["scan"]["json-save"])
{
fs::path path = get_configuration()["scan"]["json-save-directory"];
if (!fs::exists(path))
{
fs::create_directories(path);
}
std::string prefix = api_name;
if (item.get_full_name() != "")
{
prefix += "_" + item.get_full_name();
}
else
{
prefix += "_Unknown";
}
std::replace_if(prefix.begin(), prefix.end(), [](char c) { return !std::isalnum(c); }, '_');
path /= prefix + "_" + item.get_upc() + ".json";
std::ofstream out(path);
out << std::setw(4) << json << std::endl;
log("Saved JSON to " + path.string());
}
else
{
SDL_LogWarn(SDL_LOG_CATEGORY_CUSTOM, "not saving JSON, saving disabled by configuration");
}
}
/* Download the JSON data at the submitted URL, and return it as a JSON object
*/
nlohmann::json Pudding::json_from_url(const std::string& url, const std::vector<std::string>& headers)
{
std::vector<std::uint8_t> storage;
curl_get_bytes(url, storage, headers);
nlohmann::json json = nlohmann::json::parse(storage);
std::stringstream json_formatted;
json_formatted << std::setw(4) << json << std::endl;
debug(json_formatted.str());
return json;
}
/* Store the byte buffer from the submitted URL downloaded by cURL into the supplied storage vector
*/
void Pudding::curl_get_bytes(const std::string& url, std::vector<std::uint8_t>& storage, const std::vector<std::string>& headers)
{
CURL *curl;
CURLcode result;
result = curl_global_init(CURL_GLOBAL_DEFAULT);
if (result != CURLE_OK)
{
std::cout << "curl initialization failed " << curl_easy_strerror(result) << std::endl;
}
else
{
curl = curl_easy_init();
if (curl)
{
curl_easy_setopt(curl, CURLOPT_URL, url.c_str());
curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, Pudding::curl_write_response);
curl_easy_setopt(curl, CURLOPT_WRITEDATA, &storage);
curl_easy_setopt(curl, CURLOPT_USERAGENT, get_configuration()["api"]["user-agent"].get<std::string>().c_str());
struct curl_slist* list = nullptr;
if (headers.size() > 0)
{
for (const std::string& header : headers)
{
list = curl_slist_append(list, header.c_str());
}
}
curl_easy_setopt(curl, CURLOPT_HTTPHEADER, list);
result = curl_easy_perform(curl);
curl_slist_free_all(list);
if (result != CURLE_OK)
{
std::cout << "curl request failed " << curl_easy_strerror(result) << std::endl;
}
}
else
{
std::cout << "curl initialization failed" << std::endl;
}
curl_easy_cleanup(curl);
}
curl_global_cleanup();
}
/* This callback will be called by cURL when it has a response char buffer. The chars will be inserted into the storage
* vector pointed to by the storage parameter.
*/
size_t Pudding::curl_write_response(std::uint8_t* buffer, size_t size, size_t count, std::vector<std::uint8_t>* storage)
{
size_t total_size = size * count;
storage->insert(storage->end(), buffer, buffer + total_size);
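/* returning the full byte count tells cURL the data was handled; returning anything less aborts the transfer */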
return total_size;
}
/* Allocate storage for a texture, copy the cURL response data into the storage, and return the ID that corresponds to the GL texture
*/
std::shared_ptr<GLuint> Pudding::texture_from_image_url(const std::string& url)
{
log("looking up image at " + url);
std::vector<std::uint8_t> storage;
curl_get_bytes(url, storage);
if (!storage.empty())
{
debug("received image data");
/* load as an SDL surface to translate image format into pixel data, flip, and get dimensions */
SDL_RWops* rw = SDL_RWFromConstMem(storage.data(), storage.size());
SDL_Surface* surface = IMG_Load_RW(rw, 0);
SDL_Surface* flipped_surface = rotozoomSurfaceXY(surface, 0, 1, -1, 0);
std::ostringstream message;
message << "image is " << flipped_surface->w << "x" << flipped_surface->h;
log(message.str());
/* generate a GL texture, allocate space and set properties */
std::shared_ptr<GLuint> texture_id = std::shared_ptr<GLuint>(new GLuint, Pudding::destroy_texture);
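/* the custom deleter ties the GL texture's lifetime to this shared_ptr, so the texture is deleted automatically
* once the last Item image referencing it is gone */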
glGenTextures(1, texture_id.get());
glBindTexture(GL_TEXTURE_2D, *texture_id);
glTexStorage2D(GL_TEXTURE_2D, 1, GL_RGB8, flipped_surface->w, flipped_surface->h);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
/* copy image bytes into GL texture */
glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, flipped_surface->w, flipped_surface->h, GL_RGBA, GL_UNSIGNED_BYTE, flipped_surface->pixels);
log_gl_errors();
SDL_FreeSurface(surface);
SDL_FreeSurface(flipped_surface);
return texture_id;
}
else
{
SDL_LogWarn(SDL_LOG_CATEGORY_CUSTOM, "image url returned no data");
return nullptr;
}
}
/* Call GL's delete texture function and print a debug statement for testing. This is defined as a static member
* function and uses SDL's logging directly instead of the logging functions inherited from Node, because the owning
* object may already have been destroyed by the time the last shared_ptr releases its texture
*/
void Pudding::destroy_texture(GLuint* texture_id)
{
/* SDL_Log always writes at INFO priority in the application category, which SDL enables by default; SDL_LogDebug
* and SDL_LogInfo on other categories are filtered out unless their priority is raised with SDL_LogSetPriority */
SDL_Log("destroying texture ID %i", *texture_id);
glDeleteTextures(1, texture_id);
}
/* Change the currently selected item */
void Pudding::increment_item_index(int increment)
{
current_item_index = sb::mod(current_item_index + increment, static_cast<int>(items.size()));
}
Item& Pudding::get_current_item()
{
return items[current_item_index];
}
/* Returns true if item display is toggled on and there is at least one item to display */
bool Pudding::item_display_active() const
{
return show_item && items.size() > 0;
}
/* Read pixels from the camera into a GL texture. This function is meant to be launched in a separate thread,
* so it will use its own GL context to load the pixels into either the front or back texture buffer, depending
* on which is not currently in use */
int Pudding::capture_frame(void* game)
{
Pudding* pudding = reinterpret_cast<Pudding*>(game);
/* Make the shared capture context current on this thread so the texture uploads below go through it instead of
* the main rendering context (a GL context can only be current on one thread at a time) */
if (SDL_GL_MakeCurrent(pudding->window, pudding->capture_frame_thread_context) < 0)
{
pudding->log("error making thread context current");
}
else
{
if (pudding->capture.isOpened())
{
cv::Mat frame;
pudding->capture.read(frame);
if (!frame.empty())
{
/* rotate the opencv matrix 180 to work with opengl coords */
cv::flip(frame, frame, -1);
/* use whichever texture ID is not being used by the main rendering thread */
GLuint texture_id = pudding->capture_texture_id == pudding->capture_texture_front_buffer_id ?
pudding->capture_texture_back_buffer_id : pudding->capture_texture_front_buffer_id;
/* bind texture for accepting pixel data */
glBindTexture(GL_TEXTURE_2D, texture_id);
/* fill texture memory with last frame's pixels */
glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, frame.cols, frame.rows, GL_BGR, GL_UNSIGNED_BYTE, frame.ptr());
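/* publish the freshly filled texture; update() on the main thread will render from it on the next frame */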
pudding->capture_texture_id = texture_id;
if (pudding->get_configuration()["scan"]["enabled"])
{
/* convert to gray and scan with zbar */
cv::cvtColor(frame, frame, cv::COLOR_BGR2GRAY);
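/* "Y800" tells zbar the buffer is single-channel 8-bit grayscale, matching the cvtColor output above */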
zbar::Image query_image(frame.cols, frame.rows, "Y800", static_cast<void*>(frame.data), frame.cols * frame.rows);
int result = pudding->image_scanner.scan(query_image);
if (result > 0)
{
for (zbar::Image::SymbolIterator symbol = query_image.symbol_begin(); symbol != query_image.symbol_end(); ++symbol)
{
std::stringstream message;
message << "camera scanned " << symbol->get_type_name() << " symbol " << symbol->get_data();
pudding->log(message.str());
pudding->current_camera_barcode = symbol->get_data();
pudding->current_barcode = pudding->current_camera_barcode;
}
}
query_image.set_data(nullptr, 0);
}
}
frame.release();
}
SDL_GL_MakeCurrent(pudding->window, nullptr);
}
pudding->reading_capture_frame = false;
return 0;
}
/* Update parameters and draw the screen */
void Pudding::update()
{
/* launch the camera capture thread if it is not currently running */
if (capture.isOpened() && !reading_capture_frame)
{
SDL_Thread* capture_thread = SDL_CreateThread(Pudding::capture_frame, "capture frame", reinterpret_cast<void*>(this));
if (capture_thread == nullptr)
{
log("could not create capture thread");
}
else
{
reading_capture_frame = true;
/* detach the thread so SDL releases its resources when it exits, since nothing ever waits on it */
SDL_DetachThread(capture_thread);
}
}
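/* reading_capture_frame is cleared by capture_frame() when it finishes, so at most one capture thread runs at a time */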
/* if the config is set to refresh automatically, there may be a new barcode available */
if (current_config_barcode != get_configuration()["scan"]["barcode"])
{
current_config_barcode = get_configuration()["scan"]["barcode"];
current_barcode = current_config_barcode;
std::stringstream message;
message << "read new barcode from config " << current_barcode;
log(message.str());
}
/* viewport box will be used to tell GL where to draw */
Box viewport_box = window_box(true);
/* shrink viewport if item texture or camera will be displayed */
if (item_display_active() || capture.isOpened())
{
viewport_box.drag_bottom(0.3f * viewport_box.height());
}
glViewport(viewport_box.left(), viewport_box.bottom(), viewport_box.width(), viewport_box.height());
glDisable(GL_DEPTH_TEST);
glClearColor(0, 0, 0, 1);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
/* switch to flat shader for background */
glUseProgram(flat_program);
/* disable pudding attributes and enable rectangle attributes */
glDisableVertexAttribArray(2);
glDisableVertexAttribArray(3);
glEnableVertexAttribArray(0);
glEnableVertexAttribArray(1);
GLint base_texture_location = glGetUniformLocation(flat_program, "base_texture");
glUniform1i(base_texture_location, 0);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, background_texture_id);
/* set blend to modify the white part of the background; the color passed is in HSV format */
GLint blend_min_hsv_location = glGetUniformLocation(flat_program, "blend_min_hsv");
float hue = std::abs(std::abs(model[0][0]) - 0.5) * 2;
glUniform3f(blend_min_hsv_location, hue, 0.9, 1);
/* draws rectangle vertices and rectangle texture using UV coords */
glDrawArrays(GL_TRIANGLES, 0, 6);
/* draw pudding model using MVP shader */
glUseProgram(mvp_program);
glUniform1f(time_uniform_location, SDL_GetTicks() / 1000.0f);
/* calculate the transformation matrix for displaying pudding in viewport */
model = glm::rotate(model, weight(get_configuration()["pudding"]["rotation-speed"].get<float>()), Y_UNIT_NORMAL_3D);
projection = glm::perspective(
glm::radians(40.0f * 1 / viewport_box.aspect()), viewport_box.aspect(), 0.1f, 100.0f);
mvp = projection * VIEW_MATRIX * model;
/* pass the mvp matrix to the shader */
glUniformMatrix4fv(mvp_uniform_location, 1, GL_FALSE, &mvp[0][0]);
/* disable rectangle attributes and enable pudding attributes */
glDisableVertexAttribArray(0);
glDisableVertexAttribArray(1);
glEnableVertexAttribArray(2);
if (items.size() == 0)
{
glPolygonMode(GL_FRONT_AND_BACK, GL_LINE);
glEnableVertexAttribArray(3);
}
else
{
glPolygonMode(GL_FRONT_AND_BACK, GL_FILL);
glEnableVertexAttribArray(4);
GLuint pudding_texture_location = glGetUniformLocation(mvp_program, "pudding_texture");
glUniform1i(pudding_texture_location, 0);
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, *get_current_item().get_active_image_texture().get());
}
/* draw pudding model */
glEnable(GL_DEPTH_TEST);
glDrawArrays(GL_TRIANGLES, 0, pudding_vertices.size());
/* regular fill mode enabled for all other drawing */
glPolygonMode(GL_FRONT_AND_BACK, GL_FILL);
/* only do more drawing if items are downloaded or camera is enabled */
if (item_display_active() || capture.isOpened())
{
/* switch to flat shader for item and camera */
glUseProgram(flat_program);
/* disable pudding attributes and enable rectangle attributes */
glDisableVertexAttribArray(2);
glDisableVertexAttribArray(3);
glEnableVertexAttribArray(0);
glEnableVertexAttribArray(1);
glDisable(GL_DEPTH_TEST);
/* just need to set these once since we're drawing one texture per viewport */
GLint base_texture_location = glGetUniformLocation(flat_program, "base_texture");
glUniform1i(base_texture_location, 0);
glActiveTexture(GL_TEXTURE0);
/* move viewport to the bottom of screen */
viewport_box.top(viewport_box.bottom(), true);
viewport_box.bottom(window_box(true).bottom(), true);
/* reset blend to display the original texture colors */
GLint blend_min_location = glGetUniformLocation(flat_program, "blend_min_hsv");
glUniform3f(blend_min_location, 1, 0, 1);
/* draw the current item image if we're supposed to */
if (item_display_active())
{
/* shrink viewport to half size if camera will also be displayed */
if (capture.isOpened())
{
viewport_box.left(viewport_box.cx(), true);
}
glViewport(viewport_box.left(), viewport_box.bottom(), viewport_box.width(), viewport_box.height());
glBindTexture(GL_TEXTURE_2D, *get_current_item().get_active_image_texture().get());
/* draws rectangle vertices and rectangle texture using UV coords */
glDrawArrays(GL_TRIANGLES, 0, 6);
}
/* draw the camera if the camera has been opened */
if (capture.isOpened())
{
viewport_box.left(window_box(true).left());
glViewport(viewport_box.left(), viewport_box.bottom(), viewport_box.width(), viewport_box.height());
/* bind texture for drawing */
glBindTexture(GL_TEXTURE_2D, capture_texture_id);
/* draws rectangle vertices and rectangle texture using UV coords */
glDrawArrays(GL_TRIANGLES, 0, 6);
}
}
SDL_GL_SwapWindow(get_window());
log_gl_errors();
/* add a new item if a new barcode was scanned or entered */
if (current_barcode != previous_barcode)
{
add_item(current_barcode);
previous_barcode = current_barcode;
}
}