gunkiss/src/Pudding.cpp

/* _______________   ,-------------------------------------------------------------------.
  //`````````````\\   \                                                                    \
 //~~~~~~~~~~~~~~~\\   \   by @ohsqueezy & @sleepin                                         \
//=================\\   \     [ohsqueezy.itch.io] [sleepin.itch.io]                         \
//                   \\   \                                                                 \
//                    \\   \   code released under zlib license [git.nugget.fun/nugget/gunkiss]   \
//  ☆ GUNKISS ☆       \\   \                                                                \
//_____________________\\   `-------------------------------------------------------------------'
Generate a custom pudding from food product UPC codes and help a pair of rats take over the video game industry, using
their extraterrestrial ability to turn trash into performance-enhancing drug puddings that enable business professionals
to predict the stock market with supernatural accuracy.
*/
#include "Pudding.hpp"
/* Launch the Pudding instance's mainloop */
int main()
{
Pudding pudding = Pudding();
pudding.run();
pudding.quit();
return 0;
}
/* Initialize a Pudding instance */
Pudding::Pudding()
{
/* subscribe to command events */
get_delegate().subscribe(&Pudding::respond, this);
get_delegate().subscribe(&Pudding::respond, this, SDL_MOUSEMOTION);
get_delegate().subscribe(&Pudding::respond, this, SDL_MOUSEBUTTONDOWN);
/* initialize a zbar image scanner for reading barcodes of any format */
image_scanner.set_config(zbar::ZBAR_NONE, zbar::ZBAR_CFG_ENABLE, 1);
/* set up pudding model */
nlohmann::json pudding = get_configuration()["pudding"];
load_pudding_model(pudding["top-radius"], pudding["base-radius"], pudding["ring-vertex-count"], pudding["layer-count"],
pudding["y-range"][0], pudding["y-range"][1], pudding["gradient-position"]);
/* loading GL context instead of SDL context for 3D */
load_gl_context();
load_tiles();
load_pads();
/* Load a pointer cursor from the system library that will be freed automatically */
poke = std::shared_ptr<SDL_Cursor>(SDL_CreateSystemCursor(SDL_SYSTEM_CURSOR_HAND), SDL_FreeCursor);
}
/* Assign vertices, colors and texture UV coordinates to the pudding model */
void Pudding::load_pudding_model(float top_radius, float base_radius, int ring_vertex_count, int layer_count, float min_y,
float max_y, float gradient_position)
{
size_t ii;
const glm::vec3 *layer_top_color, *layer_bottom_color;
const glm::vec2 *start_vertex, *end_vertex;
float layer_top_y, layer_top_percent, layer_base_y, layer_base_percent, u_step = 1.0f / ring_vertex_count, ring_start_vertex_u;
std::vector<glm::vec2> layer_top_ring, layer_base_ring;
layer_top_ring.reserve(ring_vertex_count);
layer_base_ring.reserve(ring_vertex_count);
/* y coordinates of each ring of vertices in the pudding */
const std::map<float, float> y_coords = sb::range_percent_count(max_y, min_y, layer_count + 1);
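/* as an example (illustrative, assuming sb::range_percent_count maps a percentage along the range to its value): with
 * layer_count = 2, max_y = 0.5 and min_y = -0.5, y_coords would hold {0.0 -> 0.5, 0.5 -> 0.0, 1.0 -> -0.5}, one entry
 * per ring from the top of the pudding down to the base */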
/* loop through layers by looking at each layer's top and bottom rings simultaneously */
for (
auto layer_top_entry = y_coords.begin(), layer_base_entry = ++y_coords.begin();
layer_base_entry != y_coords.end();
layer_top_entry++, layer_base_entry++
)
{
layer_top_y = layer_top_entry->second;
layer_top_percent = layer_top_entry->first;
layer_base_y = layer_base_entry->second;
layer_base_percent = layer_base_entry->first;
layer_top_ring.clear();
layer_base_ring.clear();
sb::points_on_circle(layer_top_ring, ring_vertex_count, layer_top_percent * (base_radius - top_radius) + top_radius);
sb::points_on_circle(layer_base_ring, ring_vertex_count, layer_base_percent * (base_radius - top_radius) + top_radius);
/* layers above the gradient position are brown, layers below are yellow, and the layer that contains the gradient position
* is a gradient from brown to yellow */
if (layer_top_percent <= gradient_position && layer_base_percent > gradient_position)
{
layer_top_color = &PUDDING_BROWN;
layer_bottom_color = &PUDDING_YELLOW;
}
else if (layer_top_percent <= gradient_position)
{
layer_top_color = &PUDDING_BROWN;
layer_bottom_color = &PUDDING_BROWN;
}
else
{
layer_top_color = &PUDDING_YELLOW;
layer_bottom_color = &PUDDING_YELLOW;
}
/* u coordinate will increase toward 1.0f as we go around the ring */
ring_start_vertex_u = 0.0f;
for (ii = 0; ii < layer_top_ring.size(); ii++)
{
/* triangle that includes top two vertices and first base vertex */
start_vertex = &layer_top_ring[ii];
end_vertex = &layer_top_ring[(ii + 1) % layer_top_ring.size()];
pudding_model["position"]->add(start_vertex->x, layer_top_y, start_vertex->y);
pudding_model["uv"]->add(ring_start_vertex_u, layer_top_percent);
pudding_model["position"]->add(end_vertex->x, layer_top_y, end_vertex->y);
pudding_model["uv"]->add(ring_start_vertex_u + u_step, layer_top_percent);
pudding_model["color"]->extend(*layer_top_color, 2);
pudding_model["position"]->add(layer_base_ring[ii].x, layer_base_y, layer_base_ring[ii].y);
pudding_model["uv"]->add(ring_start_vertex_u, layer_base_percent);
pudding_model["color"]->add(*layer_bottom_color);
/* triangle that includes bottom two vertices and second top vertex */
start_vertex = &layer_base_ring[ii];
pudding_model["position"]->add(start_vertex->x, layer_base_y, start_vertex->y);
pudding_model["uv"]->add(ring_start_vertex_u, layer_base_percent);
pudding_model["color"]->add(*layer_bottom_color);
pudding_model["position"]->add(end_vertex->x, layer_top_y, end_vertex->y);
pudding_model["uv"]->add(ring_start_vertex_u + u_step, layer_top_percent);
pudding_model["color"]->add(*layer_top_color);
end_vertex = &layer_base_ring[(ii + 1) % layer_base_ring.size()];
pudding_model["position"]->add(end_vertex->x, layer_base_y, end_vertex->y);
pudding_model["uv"]->add(ring_start_vertex_u + u_step, layer_base_percent);
pudding_model["color"]->add(*layer_bottom_color);
ring_start_vertex_u += u_step;
}
}
pudding_triangle_vertex_count = pudding_model["position"]->count();
/* process the top and bottom of pudding, filling each face with a triangle fan */
float y = max_y;
const glm::vec3* face_color = &PUDDING_BROWN;
Box texture_box = Box({0, 0}, {1, 1});
for (float radius : {top_radius, base_radius})
{
/* first point in a GL_TRIANGLE_FAN is the center */
pudding_model["position"]->add(0.0f, y, 0.0f);
pudding_model["uv"]->add(0.0f, 0.0f);
layer_top_ring.clear();
sb::points_on_circle(layer_top_ring, ring_vertex_count, radius);
/* loop through points on the face */
for (ii = 0; ii < layer_top_ring.size(); ii++)
{
start_vertex = &layer_top_ring[ii];
/* for GL_TRIANGLE_FAN we just need to add an outer vertex */
pudding_model["position"]->add(start_vertex->x, y, start_vertex->y);
pudding_model["uv"]->add(*start_vertex);
/* connect the ring on the last vertex */
if (ii == layer_top_ring.size() - 1)
{
end_vertex = &layer_top_ring[(ii + 1) % layer_top_ring.size()];
pudding_model["position"]->add(end_vertex->x, y, end_vertex->y);
pudding_model["uv"]->add(*end_vertex);
}
}
/* single color for the entire layer_top_ring */
pudding_model["color"]->extend(*face_color, layer_top_ring.size() + 2);
y = min_y;
face_color = &PUDDING_YELLOW;
}
pudding_fan_vertex_count = (pudding_model["position"]->count() - pudding_triangle_vertex_count) / 2;
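/* as an example (values are illustrative, the real ones come from the config at runtime): with 4 layers of 20 ring
 * vertices, the sides use 4 * 20 * 6 = 480 vertices and each triangle fan uses 20 + 2 = 22 */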
}
/* Create GL context via super class and load vertices, UV data, and shaders */
void Pudding::load_gl_context()
{
super::load_gl_context();
/* Create another GL context for loading camera frame textures */
if ((capture_frame_thread_context = SDL_GL_CreateContext(window)) == nullptr)
{
sb::Log::log("could not create capture frame thread context");
}
/* Generate a vertex array object ID, bind it as current (requirement of OpenGL) */
vao.generate();
vao.bind();
/* Generate ID for the vertex buffer object that will hold all vertex data. Since we're using one buffer, data
* will be copied in one after the other, offset to after the previous data location. The same buffer offset will
* be passed to the vertex attributes for each data. */
vbo.generate();
vbo.bind();
/* Load two shader programs, one for rendering the flat objects, and one for rendering the 3D model. Load and configure
* the flat shader program first. */
GLuint vertex_shader = load_shader("src/flat.vert", GL_VERTEX_SHADER);
GLuint fragment_shader = load_shader("src/flat.frag", GL_FRAGMENT_SHADER);
flat_program = glCreateProgram();
glAttachShader(flat_program, vertex_shader);
glAttachShader(flat_program, fragment_shader);
Plane::position->bind(0, flat_program, "in_position");
Plane::uv->bind(1, flat_program, "vertex_uv");
/* load, configure and link the 3D world program */
vertex_shader = load_shader("src/mvp.vert", GL_VERTEX_SHADER);
fragment_shader = load_shader("src/mvp.frag", GL_FRAGMENT_SHADER);
mvp_program = glCreateProgram();
glAttachShader(mvp_program, vertex_shader);
glAttachShader(mvp_program, fragment_shader);
pudding_model.attributes("position")->bind(2, mvp_program, "vertex_position");
pudding_model.attributes("uv")->bind(3, mvp_program, "vertex_uv");
pudding_model.attributes("color")->bind(4, mvp_program, "vertex_color");
sb::Log::gl_errors("after loading shaders");
/* Fill VBO with attribute data */
vbo.allocate(background.size() + pudding_model.size(), GL_STATIC_DRAW);
vbo.add(*Plane::position);
vbo.add(*Plane::uv);
vbo.add(*pudding_model.attributes("uv"));
vbo.add(*pudding_model.attributes("position"));
vbo.add(*pudding_model.attributes("color"));
sb::Log::gl_errors("after filling VBO");
/* link shaders */
link_shader(flat_program);
link_shader(mvp_program);
sb::Log::gl_errors("after linking");
/* store uniform locations after linking */
uniform["flat"]["texture"] = glGetUniformLocation(flat_program, "base_texture");
uniform["flat"]["time"] = glGetUniformLocation(flat_program, "time");
uniform["flat"]["scroll"] = glGetUniformLocation(flat_program, "scroll");
uniform["flat"]["blend"] = glGetUniformLocation(flat_program, "blend_min_hsv");
uniform["flat"]["transformation"] = glGetUniformLocation(flat_program, "transformation");
uniform["mvp"]["mvp"] = glGetUniformLocation(mvp_program, "mvp");
uniform["mvp"]["time"] = glGetUniformLocation(mvp_program, "time");
uniform["mvp"]["effect"] = glGetUniformLocation(mvp_program, "effect");
uniform["mvp"]["uv transformation"] = glGetUniformLocation(mvp_program, "uv_transformation");
uniform["mvp"]["coordinate bound"] = glGetUniformLocation(mvp_program, "coordinate_bound");
uniform["mvp"]["pudding texture"] = glGetUniformLocation(mvp_program, "pudding_texture");
/* enable alpha rendering */
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
glEnable(GL_BLEND);
sb::Log::gl_errors("after uniform locations");
}
/* Read every jpg in the folder at tile path into a GL texture and associate with the background object. */
void Pudding::load_tiles()
{
for (fs::path path : sb::glob(get_configuration()["resource"]["tile-path"].get<fs::path>() / ".*.jpg"))
{
sb::Texture texture {path};
texture.load();
background.texture(texture, path);
}
}
/* Load every png in the button path as a Texture and add to a map. */
void Pudding::load_pads()
{
for (fs::path path : sb::glob(get_configuration()["resource"]["button-path"].get<fs::path>() / ".*.png"))
{
labels[path.stem()] = sb::Texture(path);
labels[path.stem()].load();
}
nlohmann::json interface = get_configuration()["interface"];
camera_button.texture(labels["scan"]);
camera_button.translation({interface["main-button-single-x"], interface["main-button-y"]});
camera_button.scale(interface["main-button-scale"], window_box().aspect());
inventory_button.texture(labels["inventory"]);
inventory_button.translation({interface["main-button-double-x"], interface["main-button-y"]});
inventory_button.scale(interface["main-button-scale"], window_box().aspect());
previous_button.texture(labels["arrow"]);
previous_button.translation(glm::vec2({-1, 1}) * interface["arrow-button-location"].get<glm::vec2>());
previous_button.scale(interface["arrow-button-scale"], window_box().aspect());
next_button.texture(labels["arrow"]);
next_button.translation(interface["arrow-button-location"]);
next_button.scale(interface["arrow-button-scale"], window_box().aspect());
next_button.rotation(glm::radians(180.0f));
}
/* Try to create cv::VideoCapture object using device ID #0. If successful, this will create GL texture IDs and storage
* for the camera frames, so it must be called after GL context has been created. Two textures will be created, so they
* can be used as a double buffer.
*/
void Pudding::initialize_camera()
{
/* initialize an opencv capture device for getting images from an attached camera */
int device_id = 0;
capture.open(device_id);
std::ostringstream message;
if (capture.isOpened())
{
message << "opened and initialized " << capture.get(cv::CAP_PROP_FRAME_WIDTH) << "x" <<
capture.get(cv::CAP_PROP_FRAME_HEIGHT) << ", " << capture.get(cv::CAP_PROP_FPS) <<
"fps video capture device ID #" << device_id << " using " << capture.getBackendName();
/* generate two textures that will store the video frames with the intention of double buffering them
* for threaded texture loading */
camera_view.generate({capture.get(cv::CAP_PROP_FRAME_WIDTH), capture.get(cv::CAP_PROP_FRAME_HEIGHT)});
}
else
{
message << "failed to open video capture device ID #" << device_id;
}
sb::Log::log(message);
}
/* Respond to command events */
void Pudding::respond(SDL_Event& event)
{
if (get_delegate().compare(event, "up"))
{
item_carousel.next(items);
}
else if (get_delegate().compare(event, "right"))
{
if (items.size() > 0)
{
current_item().next_texture();
}
}
else if (get_delegate().compare(event, "down"))
{
item_carousel.previous(items);
}
else if (get_delegate().compare(event, "left"))
{
if (items.size() > 0)
{
current_item().previous_texture();
}
}
else if (get_delegate().compare(event, "toggle-camera"))
{
camera_switch.toggle();
}
else if (get_delegate().compare(event, "toggle-item"))
{
show_item = !show_item;
}
/* The effect command switches the active effect to the next in the list, wrapping around at the end */
else if (get_delegate().compare(event, "effect"))
{
effect_id = (effect_id + 1) % EFFECT_COUNT;
glUseProgram(mvp_program);
glUniform1i(uniform["mvp"]["effect"], effect_id);
}
else if (get_delegate().compare(event, "tile"))
{
background.next();
}
/* Mouse interface */
else if (event.type == SDL_MOUSEMOTION || event.type == SDL_MOUSEBUTTONDOWN)
{
/* Get the secondary window viewport dimensions in NDC and pixel resolution for sizing the arrow buttons and transforming
* the mouse coordinates. */
Box viewport_ndc = sb::Display::ndc;
/* Drag viewport completely closed to the bottom of the screen */
viewport_ndc.top(viewport_ndc.bottom(), true);
nlohmann::json interface = get_configuration()["interface"];
/* Drag viewport back up the height of the pop-up window */
viewport_ndc.drag_top(interface["pop-up-viewport-height"]);
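/* as an example (illustrative, assuming drag_top moves the top edge up by the given NDC amount): with
 * pop-up-viewport-height set to 0.5, viewport_ndc would span y values from -1.0 to -0.5, the bottom quarter of the screen */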
/* Get the viewport in pixel resolution to size the buttons to be square inside the viewport */
Box viewport_pixel = get_display().ndc_to_pixel(viewport_ndc);
/* Get mouse coordinates in NDC and pixel resolution in both main window and secondary */
glm::vec2 mouse_pixel = event.type == SDL_MOUSEBUTTONDOWN ? glm::vec2{event.button.x, event.button.y} :
glm::vec2{event.motion.x, event.motion.y};
glm::vec2 mouse_ndc {
float(mouse_pixel.x) / window_box().width() * 2.0f - 1.0f, (1.0f - float(mouse_pixel.y) / window_box().height()) * 2.0f - 1.0f
};
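/* for example, in a 1280x720 window the pixel (640, 360) maps to NDC (0, 0) and the pixel (0, 720) maps to (-1, -1) */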
glm::vec2 mouse_viewport_ndc {
mouse_ndc.x, (1.0f - (float(mouse_pixel.y) - float(viewport_pixel.top())) / viewport_pixel.height()) * 2.0f - 1.0f
};
bool over_camera_button = !capture.isOpened() && !item_display_active() && camera_button.collide(mouse_ndc),
over_inventory_button = items.size() > 0 && !item_display_active() && !capture.isOpened() && inventory_button.collide(mouse_ndc),
over_close_area = (capture.isOpened() || item_display_active()) && get_display().ndc_subsection(main_viewport).collide(mouse_ndc),
over_previous_button = item_display_active() && previous_button.collide(mouse_viewport_ndc),
over_next_button = item_display_active() && next_button.collide(mouse_viewport_ndc);
/* Check for collisions with anything clickable */
if (over_camera_button || over_inventory_button || over_close_area || over_previous_button || over_next_button)
{
/* Set cursor to pokey finger */
if (SDL_GetCursor() != poke.get())
{
SDL_SetCursor(poke.get());
}
/* Respond to a click */
if (event.type == SDL_MOUSEBUTTONDOWN)
{
if (over_camera_button || over_inventory_button || over_close_area)
{
/* Reset cursor to default arrow */
SDL_SetCursor(SDL_GetDefaultCursor());
if (over_camera_button)
{
camera_switch.connect();
}
else if (over_inventory_button)
{
show_item = true;
/* Scale buttons according to viewport that is going to open */
next_button.scale(interface["arrow-button-scale"], viewport_pixel.aspect());
previous_button.scale(interface["arrow-button-scale"], viewport_pixel.aspect());
}
else if (over_close_area)
{
camera_switch.disconnect();
show_item = false;
}
}
else
{
/* Handle arrow buttons */
if (over_next_button)
{
if (current_item().at_last())
{
item_carousel.next(items);
current_item().to_first();
}
else
{
current_item().next_texture();
}
}
else
{
if (current_item().at_first())
{
item_carousel.previous(items);
current_item().to_last();
}
else
{
current_item().previous_texture();
}
}
}
}
}
else if (SDL_GetCursor() == poke.get())
{
SDL_SetCursor(SDL_GetDefaultCursor());
}
}
}
/* Build an Item object by submitting the upc parameter to multiple APIs and taking
* relevant results from each. Result JSON will be saved if saving is enabled in the global
* configuration
*/
void Pudding::add_item(const std::string& upc)
{
Item item;
item.upc(upc);
if (get_configuration()["api"]["open-food-enabled"])
{
incorporate_open_api(item, OPEN_FOOD_API_URL);
}
if (get_configuration()["api"]["open-products-enabled"])
{
incorporate_open_api(item, OPEN_PRODUCTS_API_URL);
}
if (get_configuration()["api"]["nutronix-enabled"])
{
incorporate_nutronix_api(item);
}
if (get_configuration()["api"]["edamam-enabled"])
{
incorporate_edamam_api(item);
}
if (get_configuration()["api"]["best-buy-enabled"])
{
incorporate_best_buy_api(item);
}
if (get_configuration()["api"]["google-books-enabled"])
{
incorporate_google_books_api(item);
}
if (item.texture_count() > 0)
{
items.push_back(item);
/* Set item index to end so newest item will display. */
item_carousel.end(items);
/* Move the camera button away from center to make room for inventory button if this is the first item added. */
if (items.size() == 1)
{
const nlohmann::json& interface = get_configuration()["interface"];
camera_button.translation({-1.0f * interface["main-button-double-x"].get<float>(), interface["main-button-y"]});
}
}
else
{
std::ostringstream message;
message << "discarding item, no images found for " << upc;
sb::Log::log(message);
}
}
/* Look for item upc in the Open Food/Products API and use the result to fill out item properties if found. */
void Pudding::incorporate_open_api(Item& item, const std::string& api_url)
{
std::ostringstream checking_message;
checking_message << "checking " << api_url;
sb::Log::log(checking_message);
nlohmann::json json = json_from_url(api_url + item.upc());
/* test that should determine if an Open Food API response is not empty */
if (json.value("status", 0) && json.contains("product"))
{
if (json["product"].value("image_url", "") != "")
{
std::string image_url = json["product"]["image_url"];
sb::Texture texture = texture_from_image_url(image_url);
if (texture.generated())
{
item.texture(texture, image_url);
}
}
item.brand_name(json["product"].value("brands", ""));
item.product_name(json["product"].value("product_name", ""));
if (api_url == OPEN_FOOD_API_URL)
{
save_item_json(json, item, "Open_Food_API");
}
else if (api_url == OPEN_PRODUCTS_API_URL)
{
save_item_json(json, item, "Open_Products_API");
}
}
else
{
std::ostringstream results_message;
results_message << "no results from " << api_url;
sb::Log::log(results_message);
}
}
/* Look for item upc in the Nutronix API, and use the result to fill out item properties if found
*/
void Pudding::incorporate_nutronix_api(Item& item)
{
sb::Log::log("checking Nutronix API");
/* Nutronix requires API keys in headers for validation */
nlohmann::json json = json_from_url(
NUTRONIX_API_URL + item.upc(), {
"x-app-id: " + get_configuration()["api"]["nutronix-app-id"].get<std::string>(),
"x-app-key: " + get_configuration()["api"]["nutronix-app-key"].get<std::string>()
});
/* test that should determine if a Nutronix response is not empty */
if (!(json.contains("message") && json["message"] == NUTRONIX_NOT_FOUND))
{
nlohmann::json food = json["foods"][0];
if (food.contains("photo") && food["photo"].value("thumb", "") != "")
{
std::string url = food["photo"]["thumb"];
sb::Log::log("adding image listed in Nutronix API at " + url);
sb::Texture texture = texture_from_image_url(url);
if (texture.generated())
{
item.texture(texture, url);
}
}
item.brand_name(food.value("brand_name", ""));
item.product_name(food.value("food_name", ""));
save_item_json(json, item, "Nutronix_API");
}
else
{
sb::Log::log("no results from Nutronix");
}
}
/* Submit a query to Edamam API and insert relevant results into supplied Item object
*/
void Pudding::incorporate_edamam_api(Item& item)
{
sb::Log::log("checking Edamam API");
/* build API url by concatenating relevant values into query string */
std::stringstream url;
url << "https://api.edamam.com/api/food-database/v2/parser?upc=" << item.upc() << "&app_id=" <<
get_configuration()["api"]["edamam-app-id"].get<std::string>() << "&app_key=" <<
get_configuration()["api"]["edamam-app-key"].get<std::string>();
nlohmann::json json = json_from_url(url.str());
/* test that should determine if an Edamam response has food data */
if (json.contains("hints") && json["hints"][0].contains("food"))
{
nlohmann::json food = json["hints"][0]["food"];
if (food.value("image", "") != "")
{
std::string url = food["image"];
sb::Texture texture = texture_from_image_url(url);
if (texture.generated())
{
item.texture(texture, url);
}
}
item.product_name(food.value("label", ""));
save_item_json(json, item, "Edamam_API");
}
else
{
sb::Log::log("no results from Edamam");
}
}
/* Submit a query to the Best Buy API and insert relevant results into supplied Item object
*/
void Pudding::incorporate_best_buy_api(Item& item)
{
sb::Log::log("checking Best Buy API");
/* build API url by concatenating relevant values into query string */
std::stringstream url;
url << "https://api.bestbuy.com/v1/products(upc=" << item.upc() << ")?format=json&apiKey=" <<
get_configuration()["api"]["best-buy-api-key"].get<std::string>();
nlohmann::json json = json_from_url(url.str());
/* test that should determine if a Best Buy response has a result */
if (json.contains("total") && json["total"].get<int>() > 0)
{
nlohmann::json product = json["products"][0];
/* look up image (for games this is box art) and "alternate views image" (for games this is a screen shot) */
for (std::string key : {"alternateViewsImage", "image"})
{
if (product.value(key, "") != "")
{
std::string url = product[key];
sb::Texture texture = texture_from_image_url(url);
if (texture.generated())
{
item.texture(texture, url);
}
}
}
item.product_name(product.value("name", ""));
save_item_json(json, item, "Best_Buy_API");
}
else
{
sb::Log::log("no results from Best Buy");
}
}
/* Look for item upc in the Google Books API and use the result to fill out item properties if found. */
void Pudding::incorporate_google_books_api(Item& item)
{
sb::Log::log("checking Google Books API");
nlohmann::json json = json_from_url(GOOGLE_BOOKS_API_URL + item.upc());
/* test that should determine if a Google Books API response is not empty */
if (json.value<int>("totalItems", 0) > 0 && json.contains("items") && json["items"][0].contains("volumeInfo"))
{
/* book specific section of the JSON */
json = json["items"][0]["volumeInfo"];
/* get the image data */
if (json.contains("imageLinks") && json["imageLinks"].value("thumbnail", "") != "")
{
std::string image_url = json["imageLinks"]["thumbnail"];
sb::Texture texture = texture_from_image_url(image_url);
if (texture.generated())
{
item.texture(texture, image_url);
}
}
if (json.contains("authors"))
{
item.brand_name(json["authors"][0]);
}
item.product_name(json.value("title", ""));
save_item_json(json, item, "Google_Books_API");
}
else
{
sb::Log::log("no results from Google Books API");
}
}
/* Write submitted JSON to file, creating parent directories if necessary, and using item and
* api_name to determine file name prefix
*/
void Pudding::save_item_json(const nlohmann::json& json, const Item& item, const std::string& api_name) const
{
if (get_configuration()["scan"]["json-save"])
{
fs::path path = get_configuration()["scan"]["json-save-directory"];
if (!fs::exists(path))
{
fs::create_directories(path);
}
std::string prefix = api_name;
if (item.full_name() != "")
{
prefix += "_" + item.full_name();
}
else
{
prefix += "_Unknown";
}
std::replace_if(prefix.begin(), prefix.end(), [](char c) { return !std::isalnum(static_cast<unsigned char>(c)); }, '_');
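/* e.g. a hypothetical prefix "Open_Food_API_Choco Pudding!" becomes "Open_Food_API_Choco_Pudding_" */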
path /= prefix + "_" + item.upc() + ".json";
std::ofstream out(path);
out << std::setw(4) << json << std::endl;
sb::Log::log("Saved JSON to " + path.string());
}
else
{
SDL_LogWarn(SDL_LOG_CATEGORY_CUSTOM, "not saving JSON, saving disabled by configuration");
}
}
/* Download the JSON data at the submitted URL, and return it as a JSON object
*/
nlohmann::json Pudding::json_from_url(const std::string& url, const std::vector<std::string>& headers)
{
std::vector<std::uint8_t> storage;
curl_get_bytes(url, storage, headers);
nlohmann::json json = nlohmann::json::parse(storage);
std::stringstream json_formatted;
json_formatted << std::setw(4) << json << std::endl;
sb::Log::log(json_formatted.str(), sb::Log::DEBUG);
return json;
}
/* Store the byte buffer from the submitted URL downloaded by cURL into the supplied storage vector
*/
void Pudding::curl_get_bytes(const std::string& url, std::vector<std::uint8_t>& storage, const std::vector<std::string>& headers) const
{
CURL *curl;
CURLcode result;
result = curl_global_init(CURL_GLOBAL_DEFAULT);
if (result != CURLE_OK)
{
std::cout << "curl initialization failed " << curl_easy_strerror(result) << std::endl;
}
else
{
curl = curl_easy_init();
if (curl)
{
curl_easy_setopt(curl, CURLOPT_URL, url.c_str());
curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, Pudding::curl_write_response);
curl_easy_setopt(curl, CURLOPT_WRITEDATA, &storage);
curl_easy_setopt(curl, CURLOPT_USERAGENT, get_configuration()["api"]["user-agent"].get<std::string>().c_str());
struct curl_slist* list = nullptr;
if (headers.size() > 0)
{
for (const std::string& header : headers)
{
list = curl_slist_append(list, header.c_str());
}
}
curl_easy_setopt(curl, CURLOPT_HTTPHEADER, list);
result = curl_easy_perform(curl);
curl_slist_free_all(list);
if (result != CURLE_OK)
{
std::cout << "curl request failed " << curl_easy_strerror(result) << std::endl;
}
}
else
{
std::cout << "curl initialization failed" << std::endl;
}
curl_easy_cleanup(curl);
}
curl_global_cleanup();
}
/* This callback will be called by cURL when it has a response char buffer. The chars will be inserted into the storage
* vector pointed to by the storage parameter.
*/
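/* Note: libcurl treats a return value that differs from size * count as an error and aborts the transfer, so the full
 * byte count is returned after the bytes are stored. */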
size_t Pudding::curl_write_response(std::uint8_t* buffer, size_t size, size_t count, std::vector<std::uint8_t>* storage)
{
size_t total_size = size * count;
storage->insert(storage->end(), buffer, buffer + total_size);
return total_size;
}
/* Download the image data at the submitted URL using cURL, load it into a Texture object, and return the Texture, which
 * will remain empty if no image data could be downloaded
 */
sb::Texture Pudding::texture_from_image_url(const std::string& url) const
{
/* this texture will be returned whether we load pixels into it or not */
sb::Texture texture;
sb::Log::log("looking up image at " + url);
std::vector<std::uint8_t> storage;
curl_get_bytes(url, storage);
if (!storage.empty())
{
sb::Log::log("received image data", sb::Log::DEBUG);
/* get a Texture by passing the bytes through an RW ops which will enable the Texture object to load a Surface */
SDL_RWops* rw = SDL_RWFromConstMem(storage.data(), storage.size());
texture.load(rw);
SDL_RWclose(rw);
}
else
{
SDL_LogWarn(SDL_LOG_CATEGORY_CUSTOM, "image url returned no data");
}
return texture;
}
/* Call GL's delete texture function, and print a debug statement for testing. This is defined as a static member
* function and uses the SDL logging function instead of the inherited logging functions from Node since the object
* may not be allocated at destruction time (?)
*/
void Pudding::destroy_texture(GLuint* texture_id)
{
/* not sure why SDL_Log works here but SDL_LogDebug and SDL_LogInfo don't */
std::ostringstream message;
message << "destroying texture ID " << *texture_id;
sb::Log::log(message);
glDeleteTextures(1, texture_id);
}
/* Return the item currently selected in the inventory */
Item& Pudding::current_item()
{
try
{
return *item_carousel.current(items);
}
catch (const std::out_of_range& exception)
{
std::ostringstream message;
message << "Out of range exception: " << exception.what() << " (Attempting to retrieve an item from empty inventory)";
sb::Log::log(message);
/* rethrow so a reference is never returned from an empty inventory */
throw;
}
}
/* Returns true if item display is toggled on and there is at least one item to display */
bool Pudding::item_display_active() const
{
return show_item && items.size() > 0;
}
/* Read pixels from the camera into a GL texture. This function is meant to be launched in a separate thread,
* so it will use its own GL context to load the pixels into either the front or back texture buffer, depending
* on which is not currently in use */
int Pudding::capture_frame(void* game)
{
time_it("total thread")([&]{
Pudding* pudding = reinterpret_cast<Pudding*>(game);
/* Make the thread context the current context (not sure what that means, but it doesn't seem to conflict
* with the main rendering context) */
if (time_it<int>("make current")([&] { return SDL_GL_MakeCurrent(pudding->window, pudding->capture_frame_thread_context); }) < 0)
{
sb::Log::log("error making thread context current");
}
else
{
if (pudding->capture.isOpened())
{
cv::Mat frame;
time_it("read frame")([&]{
pudding->capture.read(frame);
});
if (!frame.empty())
{
time_it("flip")([&]{
/* rotate the opencv matrix 180 to work with opengl coords */
cv::flip(frame, frame, -1);
});
time_it("load texture")([&]{
/* use whichever texture ID is not being used by the main rendering thread */
sb::Texture& texture = pudding->camera_view.free();
/* bind texture for accepting pixel data */
texture.bind();
/* fill texture memory with last frame's pixels */
texture.load(frame.ptr(), {frame.cols, frame.rows}, GL_BGR, GL_UNSIGNED_BYTE);
});
pudding->camera_view.swap();
if (pudding->get_configuration()["scan"]["enabled"])
{
time_it("gray")([&]{
/* convert to gray and scan with zbar */
cv::cvtColor(frame, frame, cv::COLOR_BGR2GRAY);
});
zbar::Image query_image(frame.cols, frame.rows, "Y800", static_cast<void*>(frame.data), frame.cols * frame.rows);
int result = pudding->image_scanner.scan(query_image);
if (result > 0)
{
time_it("barcode lookup")([&] {
for (zbar::Image::SymbolIterator symbol = query_image.symbol_begin(); symbol != query_image.symbol_end(); ++symbol)
{
std::ostringstream message;
message << "camera scanned " << symbol->get_type_name() << " symbol " << symbol->get_data();
sb::Log::log(message);
pudding->current_camera_barcode = symbol->get_data();
pudding->current_barcode = pudding->current_camera_barcode;
}
});
}
query_image.set_data(nullptr, 0);
}
}
frame.release();
}
SDL_GL_MakeCurrent(pudding->window, nullptr);
sb::Log::gl_errors("in capture thread, after capturing frame");
}
pudding->reading_capture_frame = false;
// using namespace std::chrono_literals;
// std::this_thread::sleep_for(2s);
});
return 0;
}
/* Update parameters and draw the screen */
void Pudding::update()
{
/* number of seconds we've been running for */
float time_seconds = SDL_GetTicks() / 1000.0f;
{
/* launch the camera capture thread if it is not currently running */
if (capture.isOpened() && !reading_capture_frame)
{
SDL_Thread* capture_thread = SDL_CreateThread(Pudding::capture_frame, "capture frame", reinterpret_cast<void*>(this));
if (capture_thread == nullptr)
{
sb::Log::log("could not create capture thread");
}
else
{
reading_capture_frame = true;
SDL_WaitThread(capture_thread, nullptr);
}
}
}
sb::Log::gl_errors("in main thread, after capturing frame");
/* if the config is set to refresh automatically, there may be a new barcode available */
if (current_config_barcode != get_configuration()["scan"]["barcode"])
{
current_config_barcode = get_configuration()["scan"]["barcode"];
current_barcode = current_config_barcode;
std::ostringstream message;
message << "read new barcode from config " << current_barcode;
sb::Log::log(message);
}
/* viewport box will be used to tell GL where to draw */
viewport = window_box(true);
/* shrink viewport if item texture or camera will be displayed */
if (item_display_active() || capture.isOpened())
{
viewport.drag_bottom(0.5f * get_configuration()["interface"]["pop-up-viewport-height"].get<float>() * viewport.height());
}
/* Save the main viewport dimensions */
main_viewport = viewport;
glViewport(viewport);
glDisable(GL_DEPTH_TEST);
glClearColor(0, 0, 0, 1);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
/* switch to flat shader for background */
glUseProgram(flat_program);
/* flat shader uniforms for BG: time, texture ID, disabled HSV blend, scroll on */
glActiveTexture(GL_TEXTURE0);
glUniform1f(uniform["flat"]["time"], time_seconds);
glUniform1i(uniform["flat"]["texture"], 0);
glUniform3f(uniform["flat"]["blend"], 0.0f, 0.0f, 1.0f);
glUniform1i(uniform["flat"]["scroll"], true);
glUniformMatrix4fv(uniform["flat"]["transformation"], 1, GL_FALSE, &glm::mat4(1)[0][0]);
/* disable pudding attributes and enable background attributes */
pudding_model.disable();
background.enable();
background.current().bind();
/* draws bg vertices and texture */
glDrawArrays(GL_TRIANGLES, 0, background.attributes("position")->count());
/* turn off scrolling */
glUniform1i(uniform["flat"]["scroll"], false);
sb::Log::gl_errors("after background, before pudding");
/* draw pudding model using MVP shader */
glUseProgram(mvp_program);
/* calculate the transformation matrix for displaying pudding in viewport */
model = glm::rotate(model, weight(get_configuration()["pudding"]["rotation-speed"].get<float>()), Y_UNIT_NORMAL_3D);
projection = glm::perspective(
glm::radians(40.0f * 1 / viewport.aspect()), viewport.aspect(), 0.1f, 100.0f);
mvp = projection * VIEW_MATRIX * model;
/* uniforms */
glUniform1f(uniform["mvp"]["time"], time_seconds);
glUniformMatrix4fv(uniform["mvp"]["mvp"], 1, GL_FALSE, &mvp[0][0]);
/* disable bg attributes and enable pudding attributes */
background.disable();
pudding_model.attributes("position")->enable();
if (items.size() == 0)
{
glPolygonMode(GL_FRONT_AND_BACK, GL_LINE);
// pudding_model.attributes("color")->enable();
}
else
{
glPolygonMode(GL_FRONT_AND_BACK, GL_FILL);
// pudding_model.attributes("color")->enable();
pudding_model.attributes("uv")->enable();
glUniform1i(uniform["mvp"]["pudding texture"], 0);
glActiveTexture(GL_TEXTURE0);
current_item().current_texture().bind();
}
/* draw pudding model */
glEnable(GL_DEPTH_TEST);
/* draw the sides of the pudding */
glDrawArrays(GL_TRIANGLES, 0, pudding_triangle_vertex_count);
sb::Log::gl_errors("after pudding sides, before pudding top/bottom");
/* enable squircling and draw the top and bottom of pudding */
glUniform1i(uniform["mvp"]["uv transformation"], UV_SQUIRCLE);
glUniform1f(uniform["mvp"]["coordinate bound"], get_configuration()["pudding"]["top-radius"]);
glDrawArrays(GL_TRIANGLE_FAN, pudding_triangle_vertex_count, pudding_fan_vertex_count);
glUniform1f(uniform["mvp"]["coordinate bound"], get_configuration()["pudding"]["base-radius"]);
glDrawArrays(GL_TRIANGLE_FAN, pudding_triangle_vertex_count + pudding_fan_vertex_count, pudding_fan_vertex_count);
/* disable squircling for all other drawing */
glUniform1i(uniform["mvp"]["uv transformation"], UV_NONE);
/* regular fill mode enabled for all other drawing */
glPolygonMode(GL_FRONT_AND_BACK, GL_FILL);
sb::Log::gl_errors("after pudding, before item or camera view");
/* only do more drawing if items are downloaded or camera is enabled */
if (item_display_active() || capture.isOpened())
{
/* switch to flat shader for item and camera */
glUseProgram(flat_program);
pudding_model.disable();
glDisable(GL_DEPTH_TEST);
/* just need to set these once since we're drawing one texture per viewport */
glUniform1i(uniform["flat"]["texture"], 0);
glActiveTexture(GL_TEXTURE0);
/* move viewport to the bottom of screen */
viewport.top(viewport.bottom(), true);
viewport.bottom(window_box(true).bottom(), true);
/* reset blend to display the original texture colors */
glUniform3f(uniform["flat"]["blend"], 0.0f, 0.0f, 1.0f);
/* draw the current item image if we're supposed to */
if (item_display_active())
{
/* shrink viewport to half size if camera will also be displayed */
if (capture.isOpened())
{
viewport.left(viewport.cx(), true);
}
glViewport(viewport);
current_item().current_texture().bind();
current_item().view().enable();
/* draws rectangle vertices and rectangle texture using UV coords */
glDrawArrays(GL_TRIANGLES, 0, current_item().view().attributes("position")->count());
current_item().view().disable();
/* Draw arrows for cycling through items in inventory */
if (items.size() > 1 || current_item().texture_count() > 1)
{
next_button.draw(uniform["flat"]["transformation"]);
previous_button.draw(uniform["flat"]["transformation"]);
}
}
/* draw the camera if the camera has been opened */
if (capture.isOpened())
{
viewport.left(window_box(true).left());
glViewport(viewport);
/* bind texture for drawing */
glUniformMatrix4fv(uniform["flat"]["transformation"], 1, GL_FALSE, &camera_view.transformation()[0][0]);
camera_view.current().bind();
camera_view.enable();
/* draws rectangle vertices and rectangle texture using UV coords */
glDrawArrays(GL_TRIANGLES, 0, camera_view.attributes("position")->count());
}
}
else
{
/* Draw the camera button if neither the camera nor the inventory is displayed */
glUseProgram(flat_program);
camera_button.draw(uniform["flat"]["transformation"]);
/* And the inventory button if there are items scanned into the inventory */
if (items.size() > 0)
{
inventory_button.draw(uniform["flat"]["transformation"]);
}
}
SDL_GL_SwapWindow(get_window());
sb::Log::gl_errors("at end of update");
/* add a new item if a new barcode was scanned or entered */
if (current_barcode != previous_barcode)
{
add_item(current_barcode);
previous_barcode = current_barcode;
}
}
/* Construct a Pad using a texture, a translation, a scale, and a callback function. A Pad is a Plane which can be clicked
* to launch an arbitrary user function. It can be sized and placed by setting the translation and scale values. The translation
* is relative to (0.0, 0.0), and the scale is relative to the Plane, which has opposite corners at (-1.0, -1.0) and (1.0, 1.0).
* The texture is the graphic that displays in the Pad location. The callback must be a function that doesn't return a value or
* accept any arguments. */
Pad::Pad(sb::Texture texture, glm::vec2 translation, float scale, float ratio, std::function<void()> on_connect, float rotation)
{
this->texture(texture);
this->translation(translation);
this->scale(scale, ratio);
if (rotation)
{
this->rotation(rotation);
}
this->on_connect(on_connect);
collision_box.invert_y(true);
}
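/* Illustrative use only (the values here are hypothetical): a pad that opens the camera when clicked could be built with
 *
 *     Pad scan_pad {labels["scan"], {0.0f, -0.75f}, 0.1f, window_box().aspect(), [&]() { camera_switch.connect(); }, 0.0f};
 *
 * which places a square button near the bottom center of the window */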
/* Set angle in radians the pad will be rotated. The pad will be rotated around its center. The collision box will not
* change, so the box will not contain the entire pad if the angle is not a multiple of pi/2. The pad's transformation
* matrix will automatically be set to incorporate this rotation transformation. */
void Pad::rotation(float angle)
{
rotation_angle = angle;
transform();
}
/* Set the scale using a factor and ratio that will transform the pad in the X and Y dimensions. The ratio will determine
* how much each axis is scaled. If the ratio is above one, the X-axis's scale will be divided by the ratio. If the ratio
* is below one, the Y-axis's scale will be multiplied by the ratio. If the aspect ratio of the window is given,
* this will force the pad to display as a square, and the ratio will be relative to the shorter axis. The collision box
* will be scaled by the same factors. The pad's transformation matrix will automatically be set to incorporate this
* scale transformation. */
void Pad::scale(float factor, float ratio)
{
scale_factor = factor;
scale_ratio = ratio;
transform();
}
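/* worked example (illustrative): with factor 0.1 and a 16:9 window ratio of roughly 1.78, scale.x becomes about 0.056
 * while scale.y stays 0.1; since a horizontal NDC unit covers 16/9 times as many pixels as a vertical one, the pad
 * spans the same number of pixels in each direction and displays as a square */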
/* Set a translation for the pad object in the X and Y dimension using a 2d vector. The collision box will be moved by the
* same translation. The pad's transformation matrix will automatically be set to incorporate this translation
* transformation. */
void Pad::translation(const glm::vec2& translation)
{
translation_vector = translation;
transform();
}
/* Set the transformation matrix for the pad object by multiplying the translation, scale, and rotation matrices in that
 * order, which means the pad's vertices are effectively rotated first, then scaled, then translated. The collision box
 * will be scaled and moved to fit around the position coordinates that would result from applying this transformation
 * to the position coordinates. */
void Pad::transform()
{
glm::vec3 scale { scale_factor, scale_factor, 1.0f };
if (scale_ratio > 1.0f)
{
scale.x /= scale_ratio;
}
else if (scale_ratio < 1.0f)
{
scale.y *= scale_ratio;
}
collision_box.size(2.0f * glm::vec2{scale.x, scale.y}, true);
collision_box.center(translation_vector);
Model::transformation(glm::translate(glm::vec3{translation_vector.x, translation_vector.y, 0.0f}) *
glm::scale(scale) * glm::rotate(rotation_angle, ROTATION_AXIS));
}
/* Set the function that will run when a pad object is clicked. */
void Pad::on_connect(std::function<void()> on_connect)
{
connection.on_connect(on_connect);
}
/* Returns true if the point at position collides with the pad's collision box. */
bool Pad::collide(const glm::vec2& position) const
{
return collision_box.collide(position);
}
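/* Upload the pad's transformation matrix to the supplied uniform location, bind the pad's texture, and draw the pad's
 * vertices as triangles. This assumes the shader program that owns the uniform is already in use. */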
void Pad::draw(GLuint uniform_id)
{
glUniformMatrix4fv(uniform_id, 1, GL_FALSE, &transformation()[0][0]);
texture().bind();
enable();
glDrawArrays(GL_TRIANGLES, 0, attributes("position")->count());
disable();
}
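/* Overload of glViewport that accepts a Box, unpacking it into the left, bottom, width and height arguments expected by
 * the GL call so a viewport can be set directly from a Box */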
void glViewport(Box box)
{
glViewport(box.left(), box.bottom(), box.width(), box.height());
}