gunkiss/src/Pudding.cpp

1297 lines
53 KiB
C++

/* _______________ ,--------------------------------------------------------.
//`````````````\\ \ \
//~~~~~~~~~~~~~~~\\ \ by @ohsqueezy & @sleepin \
//=================\\ \ [ohsqueezy.itch.io] [sleepin.itch.io] \
// \\ \ \
// \\ \ zlib licensed code at [git.nugget.fun/nugget/gunkiss] \
// ☆ GUNKISS ☆ \\ \ \
//_________________________\\ `--------------------------------------------------------'
Generate a custom pudding from food product UPC codes and help a pair of rats take over the video game industry, using
their extraterrestrial ability to turn trash into performance enhancing drug puddings that enable business professionals
to predict the stock market with supernatural accuracy.
*/
#include "Pudding.hpp"
/* Entry point: construct the Pudding application, run its mainloop until exit, then shut down. */
int main()
{
    Pudding pudding;
    pudding.run();
    pudding.quit();
    return 0;
}
#ifdef __EMSCRIPTEN__
/* Mark that a new camera frame is ready to be read.
 * NOTE(review): presumably invoked from the JavaScript side once it has written frame pixels --
 * confirm against the Emscripten bindings declared elsewhere. */
void flag_frame()
{
    new_frame_available = true;
}
/* Record the offset into the Emscripten heap where camera pixel data is stored.
 * NOTE(review): presumably invoked from the JavaScript capture code -- confirm. */
void set_heap_offset(int offset)
{
    emscripten_heap_offset = offset;
}
#endif
/* Initialize a Pudding instance: subscribe to input events, configure the barcode scanner, build
 * the 3D pudding model from configuration values, create the GL context, and load all textures
 * and interface objects. */
Pudding::Pudding()
{
    /* subscribe to command events, plus raw mouse motion and button events for the pointer interface */
    get_delegate().subscribe(&Pudding::respond, this);
    get_delegate().subscribe(&Pudding::respond, this, SDL_MOUSEMOTION);
    get_delegate().subscribe(&Pudding::respond, this, SDL_MOUSEBUTTONDOWN);
    /* initialize a zbar image scanner for reading barcodes of any format */
    image_scanner.set_config(zbar::ZBAR_NONE, zbar::ZBAR_CFG_ENABLE, 1);
    /* set up pudding model using the geometry parameters from the global configuration */
    nlohmann::json pudding = configuration()["pudding"];
    load_pudding_model(pudding["top-radius"], pudding["base-radius"], pudding["ring-vertex-count"], pudding["layer-count"],
        pudding["y-range"][0], pudding["y-range"][1], pudding["gradient-position"]);
    /* loading GL context instead of SDL context for 3D */
    load_gl_context();
    /* Add a texture to the camera Plane for storing frame image data */
    camera_view.texture(sb::Texture());
    /* flip the camera plane vertically; presumably because camera frame rows arrive in the
     * opposite order from GL's texture coordinate convention -- confirm */
    glm::mat4 flip = glm::mat4(1);
    flip[1][1] = -1;
    camera_view.transformation(flip);
    /* Load background tiles */
    load_tiles();
    /* Load button graphics and create button objects */
    load_pads();
    /* Load a pointer cursor from the system library that will be freed automatically */
    poke = std::shared_ptr<SDL_Cursor>(SDL_CreateSystemCursor(SDL_SYSTEM_CURSOR_HAND), SDL_FreeCursor);
}
/* Assign vertices, colors and texture UV coordinates to the pudding model */
void Pudding::load_pudding_model(float top_radius, float base_radius, int ring_vertex_count, int layer_count, float min_y,
float max_y, float gradient_position)
{
size_t ii;
const glm::vec3 *layer_top_color, *layer_bottom_color;
const glm::vec2 *start_vertex, *end_vertex;
float layer_top_y, layer_top_percent, layer_base_y, layer_base_percent, u_step = 1.0f / ring_vertex_count, ring_start_vertex_u;
std::vector<glm::vec2> layer_top_ring, layer_base_ring;
layer_top_ring.reserve(ring_vertex_count);
layer_base_ring.reserve(ring_vertex_count);
/* y coordinates of each ring of vertices in the pudding */
const std::map<float, float> y_coords = sb::range_percent_count(max_y, min_y, layer_count + 1);
/* loop through layers by looking at each layer's top and bottom rings simultaneously */
for (
auto layer_top_entry = y_coords.begin(), layer_base_entry = ++y_coords.begin();
layer_base_entry != y_coords.end();
layer_top_entry++, layer_base_entry++
)
{
layer_top_y = layer_top_entry->second;
layer_top_percent = layer_top_entry->first;
layer_base_y = layer_base_entry->second;
layer_base_percent = layer_base_entry->first;
layer_top_ring.clear();
layer_base_ring.clear();
sb::points_on_circle(layer_top_ring, ring_vertex_count, layer_top_percent * (base_radius - top_radius) + top_radius);
sb::points_on_circle(layer_base_ring, ring_vertex_count, layer_base_percent * (base_radius - top_radius) + top_radius);
/* layers above gradient position are brown, layers below are yellow, and the layer that contains gradient positon
* is a gradient from brown to yellow */
if (layer_top_percent <= gradient_position && layer_base_percent > gradient_position)
{
layer_top_color = &PUDDING_BROWN;
layer_bottom_color = &PUDDING_YELLOW;
}
else if (layer_top_percent <= gradient_position)
{
layer_top_color = &PUDDING_BROWN;
layer_bottom_color = &PUDDING_BROWN;
}
else
{
layer_top_color = &PUDDING_YELLOW;
layer_bottom_color = &PUDDING_YELLOW;
}
/* u coordinate will increase toward 1.0f as we go around the ring */
ring_start_vertex_u = 0.0f;
for (ii = 0; ii < layer_top_ring.size(); ii++)
{
/* triangle that includes top two vertices and first base vertex */
start_vertex = &layer_top_ring[ii];
end_vertex = &layer_top_ring[(ii + 1) % layer_top_ring.size()];
pudding_model["position"]->add(start_vertex->x, layer_top_y, start_vertex->y);
pudding_model["uv"]->add(ring_start_vertex_u, layer_top_percent);
pudding_model["position"]->add(end_vertex->x, layer_top_y, end_vertex->y);
pudding_model["uv"]->add(ring_start_vertex_u + u_step, layer_top_percent);
pudding_model["color"]->extend(*layer_top_color, 2);
pudding_model["position"]->add(layer_base_ring[ii].x, layer_base_y, layer_base_ring[ii].y);
pudding_model["uv"]->add(ring_start_vertex_u, layer_base_percent);
pudding_model["color"]->add(*layer_bottom_color);
/* triangle that includes bottom two vertices and second top vertex */
start_vertex = &layer_base_ring[ii];
pudding_model["position"]->add(start_vertex->x, layer_base_y, start_vertex->y);
pudding_model["uv"]->add(ring_start_vertex_u, layer_base_percent);
pudding_model["color"]->add(*layer_bottom_color);
pudding_model["position"]->add(end_vertex->x, layer_top_y, end_vertex->y);
pudding_model["uv"]->add(ring_start_vertex_u + u_step, layer_top_percent);
pudding_model["color"]->add(*layer_top_color);
end_vertex = &layer_base_ring[(ii + 1) % layer_base_ring.size()];
pudding_model["position"]->add(end_vertex->x, layer_base_y, end_vertex->y);
pudding_model["uv"]->add(ring_start_vertex_u + u_step, layer_base_percent);
pudding_model["color"]->add(*layer_bottom_color);
ring_start_vertex_u += u_step;
}
}
pudding_triangle_vertex_count = pudding_model["position"]->count();
/* process the top and bottom of pudding, filling each face with a triangle fan */
float y = max_y;
const glm::vec3* face_color = &PUDDING_BROWN;
Box texture_box = Box({0, 0}, {1, 1});
for (float radius : {top_radius, base_radius})
{
/* first point in a GL_TRIANGLE_FAN is the center */
pudding_model["position"]->add(0.0f, y, 0.0f);
pudding_model["uv"]->add(0.0f, 0.0f);
layer_top_ring.clear();
sb::points_on_circle(layer_top_ring, ring_vertex_count, radius);
/* loop through points on the face */
for (ii = 0; ii < layer_top_ring.size(); ii++)
{
start_vertex = &layer_top_ring[ii];
/* for GL_TRIANGLE_FAN we just need to add an outer vertex */
pudding_model["position"]->add(start_vertex->x, y, start_vertex->y);
pudding_model["uv"]->add(*start_vertex);
/* connect the ring on the last vertex */
if (ii == layer_top_ring.size() - 1)
{
end_vertex = &layer_top_ring[(ii + 1) % layer_top_ring.size()];
pudding_model["position"]->add(end_vertex->x, y, end_vertex->y);
pudding_model["uv"]->add(*end_vertex);
}
}
/* single color for the entire layer_top_ring */
pudding_model["color"]->extend(*face_color, layer_top_ring.size() + 2);
y = min_y;
face_color = &PUDDING_YELLOW;
}
pudding_fan_vertex_count = (pudding_model["position"]->count() - pudding_triangle_vertex_count) / 2;
}
/* Create GL context via super class and load vertices, UV data, and shaders.
 * All attribute binding must happen before linking, and the VBO is filled with both the flat
 * plane attributes and the 3D model attributes back to back. */
void Pudding::load_gl_context()
{
    super::load_gl_context();
    /* Generate a vertex array object ID, bind it as current (requirement of OpenGL) */
    vao.generate();
    vao.bind();
    /* Generate ID for the vertex buffer object that will hold all vertex data. Using one buffer for all attributes, data
     * will be copied in one after the other. */
    vbo.generate();
    vbo.bind();
    /* Load two shader programs, one for rendering the flat objects, and one for rendering the 3D model. Load and configure
     * the flat shader program first. */
    GLuint vertex_shader = load_shader("src/shaders/flat.vert", GL_VERTEX_SHADER);
    GLuint fragment_shader = load_shader("src/shaders/flat.frag", GL_FRAGMENT_SHADER);
    flat_program = glCreateProgram();
    glAttachShader(flat_program, vertex_shader);
    glAttachShader(flat_program, fragment_shader);
    /* bind the flat program's attributes to locations 0-1 before linking */
    Plane::position->bind(0, flat_program, "in_position");
    Plane::uv->bind(1, flat_program, "vertex_uv");
    /* load, configure and link the 3D world program */
    vertex_shader = load_shader("src/shaders/mvp.vert", GL_VERTEX_SHADER);
    fragment_shader = load_shader("src/shaders/mvp.frag", GL_FRAGMENT_SHADER);
    mvp_program = glCreateProgram();
    glAttachShader(mvp_program, vertex_shader);
    glAttachShader(mvp_program, fragment_shader);
    /* the 3D model's attributes get locations 2-4 so both programs can share the single VBO */
    pudding_model.attributes("position")->bind(2, mvp_program, "vertex_position");
    pudding_model.attributes("uv")->bind(3, mvp_program, "vertex_uv");
    pudding_model.attributes("color")->bind(4, mvp_program, "vertex_color");
    sb::Log::gl_errors("after loading shaders");
    /* Fill VBO with attribute data. NOTE(review): plane data is added first, then the model's uv,
     * position, color -- presumably sb::VBO records each attribute's offset as it is added;
     * confirm the add order is what the bound locations expect. */
    vbo.allocate(background.size() + pudding_model.size(), GL_STATIC_DRAW);
    vbo.add(*Plane::position);
    vbo.add(*Plane::uv);
    vbo.add(*pudding_model.attributes("uv"));
    vbo.add(*pudding_model.attributes("position"));
    vbo.add(*pudding_model.attributes("color"));
    sb::Log::gl_errors("after filling VBO");
    /* link shaders (attribute locations must already be bound at this point) */
    link_shader(flat_program);
    link_shader(mvp_program);
    sb::Log::gl_errors("after linking");
    /* store uniform locations after linking */
    uniform["flat"]["texture"] = glGetUniformLocation(flat_program, "base_texture");
    uniform["flat"]["time"] = glGetUniformLocation(flat_program, "time");
    uniform["flat"]["scroll"] = glGetUniformLocation(flat_program, "scroll");
    uniform["flat"]["blend"] = glGetUniformLocation(flat_program, "blend_min_hsv");
    uniform["flat"]["transformation"] = glGetUniformLocation(flat_program, "transformation");
    uniform["mvp"]["mvp"] = glGetUniformLocation(mvp_program, "mvp");
    uniform["mvp"]["time"] = glGetUniformLocation(mvp_program, "time");
    uniform["mvp"]["effect"] = glGetUniformLocation(mvp_program, "effect");
    uniform["mvp"]["uv transformation"] = glGetUniformLocation(mvp_program, "uv_transformation");
    uniform["mvp"]["coordinate bound"] = glGetUniformLocation(mvp_program, "coordinate_bound");
    uniform["mvp"]["pudding texture"] = glGetUniformLocation(mvp_program, "pudding_texture");
    /* enable alpha rendering */
    glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
    glEnable(GL_BLEND);
    sb::Log::gl_errors("after uniform locations");
}
/* Read every jpg in the folder at tile path into a GL texture and associate with the background object. */
void Pudding::load_tiles()
{
for (fs::path path : sb::glob(configuration()["resource"]["tile-path"].get<fs::path>() / ".*.jpg"))
{
sb::Texture texture {path};
texture.load();
background.texture(texture, path);
}
}
/* Load every png in the button path as a Texture and add to a map. */
void Pudding::load_pads()
{
for (fs::path path : sb::glob(configuration()["resource"]["button-path"].get<fs::path>() / ".*.png"))
{
labels[path.stem()] = sb::Texture(path);
labels[path.stem()].load();
}
nlohmann::json interface = configuration()["interface"];
camera_button.texture(labels["scan"]);
camera_button.translation({interface["main-button-single-x"], interface["main-button-y"]});
camera_button.scale(interface["main-button-scale"], window_box().aspect());
inventory_button.texture(labels["inventory"]);
inventory_button.translation({interface["main-button-double-x"], interface["main-button-y"]});
inventory_button.scale(interface["main-button-scale"], window_box().aspect());
previous_button.texture(labels["arrow"]);
previous_button.translation(glm::vec2({-1, 1}) * interface["arrow-button-location"].get<glm::vec2>());
previous_button.scale(interface["arrow-button-scale"], window_box().aspect());
next_button.texture(labels["arrow"]);
next_button.translation(interface["arrow-button-location"]);
next_button.scale(interface["arrow-button-scale"], window_box().aspect());
next_button.rotation(glm::radians(180.0f));
}
/*!
* Try to create cv::VideoCapture object using device ID #0. If successful, this will also create a GL texture ID and
* storage for the camera frame on the GPU, so it must be called after GL context has been created. Create and detach
* a thread which will continuously read frame data.
*/
void Pudding::open_camera()
{
#ifndef __EMSCRIPTEN__
    /* Open the OpenCV capture, using device ID #0 to get the default attached camera. */
    int device_id = 0;
    capture.open(device_id);
    std::ostringstream message;
    if (capture.isOpened())
    {
        message << "Opened and initialized " << capture.get(cv::CAP_PROP_FRAME_WIDTH) << "x" <<
            capture.get(cv::CAP_PROP_FRAME_HEIGHT) << ", " << capture.get(cv::CAP_PROP_FPS) <<
            "fps video capture device ID #" << device_id << " using " << capture.getBackendName();
        /* Check config for a requested camera resolution, and if there is one, try applying it to the `cv::VideoCapture`. The
         * requested resolution may not be available, and if so, `cv::VideoCapture` will choose a resolution. If the resulting
         * resolution is different from the config value, print the resolution the capture device was set to instead. */
        if (configuration()["display"].contains("camera-resolution"))
        {
            capture.set(cv::CAP_PROP_FRAME_WIDTH, configuration()["display"]["camera-resolution"][0]);
            capture.set(cv::CAP_PROP_FRAME_HEIGHT, configuration()["display"]["camera-resolution"][1]);
            message << std::endl << "Changed resolution to " << configuration()["display"]["camera-resolution"];
            if (capture.get(cv::CAP_PROP_FRAME_WIDTH) != configuration()["display"]["camera-resolution"][0] ||
                capture.get(cv::CAP_PROP_FRAME_HEIGHT) != configuration()["display"]["camera-resolution"][1])
            {
                message << " (but got " << capture.get(cv::CAP_PROP_FRAME_WIDTH) << "x" << capture.get(cv::CAP_PROP_FRAME_HEIGHT) << ")";
            }
        }
        /* Generate a texture the size of the camera's resolution. Must run after the GL context exists. */
        camera_view.texture().generate({capture.get(cv::CAP_PROP_FRAME_WIDTH), capture.get(cv::CAP_PROP_FRAME_HEIGHT)});
        /* Create and detach a thread which will read frame data. NOTE(review): the thread captures
         * `this` and runs until the camera switch turns off (see capture_frame); confirm the
         * Pudding object cannot be destructed while the detached thread is still running. */
        std::thread camera_thread(&Pudding::capture_frame, this);
        camera_thread.detach();
    }
    else
    {
        message << "failed to open video capture device ID #" << device_id;
    }
    sb::Log::log(message);
#else
    /* web build: camera capture is handled on the JavaScript side */
    emscripten_run_script("open_camera()");
#endif
}
/* Release the camera device (native build) or tell the JavaScript side to stop capture (web build).
 * Releasing the capture also causes the detached frame-reading thread's loop condition to end via
 * the camera switch toggled by the caller. */
void Pudding::close_camera()
{
#ifndef __EMSCRIPTEN__
    capture.release();
#else
    emscripten_run_script("close_camera()");
#endif
}
/* Respond to command events (inventory navigation, camera and item toggles, effect and tile
 * cycling) and to raw mouse motion/button events for the pointer interface. */
void Pudding::respond(SDL_Event& event)
{
    if (get_delegate().compare(event, "up"))
    {
        item_carousel.next(items);
    }
    else if (get_delegate().compare(event, "right"))
    {
        if (items.size() > 0)
        {
            current_item().next_texture();
        }
    }
    else if (get_delegate().compare(event, "down"))
    {
        item_carousel.previous(items);
    }
    else if (get_delegate().compare(event, "left"))
    {
        if (items.size() > 0)
        {
            current_item().previous_texture();
        }
    }
    else if (get_delegate().compare(event, "toggle-camera"))
    {
        camera_switch.toggle();
    }
    else if (get_delegate().compare(event, "toggle-item"))
    {
        show_item = !show_item;
    }
    /* The effect command switches the active effect to the next in the list, wrapping around at the end */
    else if (get_delegate().compare(event, "effect"))
    {
        /* fixed: was `effect_id = ++effect_id % EFFECT_COUNT`, which modifies effect_id twice in
         * one expression (undefined behavior before C++17, flagged by -Wsequence-point) */
        effect_id = (effect_id + 1) % EFFECT_COUNT;
        glUseProgram(mvp_program);
        glUniform1i(uniform["mvp"]["effect"], effect_id);
    }
    else if (get_delegate().compare(event, "tile"))
    {
        background.next();
    }
    /* Mouse interface */
    else if (event.type == SDL_MOUSEMOTION || event.type == SDL_MOUSEBUTTONDOWN)
    {
        /* Get the secondary window viewport dimensions in NDC and pixel resolution for sizing the arrow buttons and transforming
         * the mouse coordinates. */
        Box viewport_ndc = sb::Display::ndc;
        /* Drag viewport completely closed to the bottom of the screen */
        viewport_ndc.top(viewport_ndc.bottom(), true);
        nlohmann::json interface = configuration()["interface"];
        /* Drag viewport back up the height of the pop-up window */
        viewport_ndc.drag_top(interface["pop-up-viewport-height"]);
        /* Get the viewport in pixel resolution to size the buttons to be square inside the viewport */
        Box viewport_pixel = get_display().ndc_to_pixel(viewport_ndc);
        /* Get mouse coordinates in NDC and pixel resolution in both main window and secondary */
        glm::vec2 mouse_pixel = event.type == SDL_MOUSEBUTTONDOWN ? glm::vec2{event.button.x, event.button.y} :
            glm::vec2{event.motion.x, event.motion.y};
        glm::vec2 mouse_ndc {
            float(mouse_pixel.x) / window_box().width() * 2.0f - 1.0f, (1.0f - float(mouse_pixel.y) / window_box().height()) * 2.0f - 1.0f
        };
        glm::vec2 mouse_viewport_ndc {
            mouse_ndc.x, (1.0f - (float(mouse_pixel.y) - float(viewport_pixel.top())) / viewport_pixel.height()) * 2.0f - 1.0f
        };
        /* Each clickable region is only live in the mode where it is visible */
        bool over_camera_button = !camera_switch && !item_display_active() && camera_button.collide(mouse_ndc),
            over_inventory_button = items.size() > 0 && !item_display_active() && !camera_switch && inventory_button.collide(mouse_ndc),
            over_close_area = (camera_switch || item_display_active()) && get_display().ndc_subsection(main_viewport).collide(mouse_ndc),
            over_previous_button = item_display_active() && previous_button.collide(mouse_viewport_ndc),
            over_next_button = item_display_active() && next_button.collide(mouse_viewport_ndc);
        /* Check for collisions with anything clickable */
        if (over_camera_button || over_inventory_button || over_close_area || over_previous_button || over_next_button)
        {
            /* Set cursor to pokey finger */
            if (SDL_GetCursor() != poke.get())
            {
                SDL_SetCursor(poke.get());
            }
            /* Respond to a click */
            if (event.type == SDL_MOUSEBUTTONDOWN)
            {
                if (over_camera_button || over_inventory_button || over_close_area)
                {
                    /* Reset cursor to default arrow */
                    SDL_SetCursor(SDL_GetDefaultCursor());
                    if (over_camera_button)
                    {
                        camera_switch.connect();
                    }
                    else if (over_inventory_button)
                    {
                        show_item = true;
                        /* Scale buttons according to viewport that is going to open */
                        next_button.scale(interface["arrow-button-scale"], viewport_pixel.aspect());
                        previous_button.scale(interface["arrow-button-scale"], viewport_pixel.aspect());
                    }
                    else if (over_close_area)
                    {
                        camera_switch.disconnect();
                        show_item = false;
                    }
                }
                else
                {
                    /* Handle arrow buttons: step through the current item's textures, rolling over
                     * to the next/previous item at either end */
                    if (over_next_button)
                    {
                        if (current_item().at_last())
                        {
                            item_carousel.next(items);
                            current_item().to_first();
                        }
                        else
                        {
                            current_item().next_texture();
                        }
                    }
                    else
                    {
                        if (current_item().at_first())
                        {
                            item_carousel.previous(items);
                            current_item().to_last();
                        }
                        else
                        {
                            current_item().previous_texture();
                        }
                    }
                }
            }
        }
        else if (SDL_GetCursor() == poke.get())
        {
            /* pointer left all clickable regions; restore the default arrow cursor */
            SDL_SetCursor(SDL_GetDefaultCursor());
        }
    }
}
/* Build an Item object by submitting the upc parameter to multiple APIs and taking relevant
 * results from each. The item is kept only if at least one API supplied an image. Result JSON
 * will be saved if saving is enabled in the global configuration. */
void Pudding::add_item(const std::string& upc)
{
    Item item;
    item.upc(upc);
    /* each product API is queried only when its flag is enabled in the configuration */
    auto api_enabled = [&](const char* flag) -> bool {
        return configuration()["api"][flag];
    };
    if (api_enabled("open-food-enabled"))
    {
        incorporate_open_api(item, OPEN_FOOD_API_URL);
    }
    if (api_enabled("open-products-enabled"))
    {
        incorporate_open_api(item, OPEN_PRODUCTS_API_URL);
    }
    if (api_enabled("nutronix-enabled"))
    {
        incorporate_nutronix_api(item);
    }
    if (api_enabled("edamam-enabled"))
    {
        incorporate_edamam_api(item);
    }
    if (api_enabled("best-buy-enabled"))
    {
        incorporate_best_buy_api(item);
    }
    if (api_enabled("google-books-enabled"))
    {
        incorporate_google_books_api(item);
    }
    if (item.texture_count() > 0)
    {
        items.push_back(item);
        /* Set item index to end so newest item will display. */
        item_carousel.end(items);
        /* Move the camera button away from center to make room for inventory button if this is the first item added. */
        if (items.size() == 1)
        {
            const nlohmann::json& interface = configuration()["interface"];
            camera_button.translation({-1.0f * interface["main-button-double-x"].get<float>(), interface["main-button-y"]});
        }
    }
    else
    {
        std::ostringstream message;
        message << "discarding item, no images found for " << upc;
        sb::Log::log(message);
    }
}
/* Look for item upc in the Open Food/Products API and use the result to fill out item properties if found. */
void Pudding::incorporate_open_api(Item& item, const std::string& api_url)
{
    std::ostringstream checking_message;
    checking_message << "checking " << api_url;
    sb::Log::log(checking_message);
    nlohmann::json response = json_from_url(api_url + item.upc());
    /* a usable Open Food API response reports a truthy status and contains a product object */
    if (response.value("status", 0) && response.contains("product"))
    {
        nlohmann::json& product = response["product"];
        /* attach the listed product image if it can be downloaded */
        if (product.value("image_url", "") != "")
        {
            std::string image_url = product["image_url"];
            sb::Texture texture = texture_from_image_url(image_url);
            if (texture.generated())
            {
                item.texture(texture, image_url);
            }
        }
        item.brand_name(product.value("brands", ""));
        item.product_name(product.value("product_name", ""));
        /* tag the saved JSON with the API it came from */
        if (api_url == OPEN_FOOD_API_URL)
        {
            save_item_json(response, item, "Open_Food_API");
        }
        else if (api_url == OPEN_PRODUCTS_API_URL)
        {
            save_item_json(response, item, "Open_Products_API");
        }
    }
    else
    {
        std::ostringstream results_message;
        results_message << "no results from " << api_url;
        sb::Log::log(results_message);
    }
}
/* Look for item upc in the Nutronix API, and use the result to fill out item properties if found */
void Pudding::incorporate_nutronix_api(Item& item)
{
    sb::Log::log("checking Nutronix API");
    /* Nutronix requires API keys in headers for validation */
    nlohmann::json json = json_from_url(
        NUTRONIX_API_URL + item.upc(), {
            "x-app-id: " + configuration()["api"]["nutronix-app-id"].get<std::string>(),
            "x-app-key: " + configuration()["api"]["nutronix-app-key"].get<std::string>()
        });
    /* A usable response carries no "not found" message and a non-empty foods array. Checking the
     * array explicitly also protects against malformed responses, which previously crashed here
     * by indexing a missing "foods" key and calling value() on the resulting null. */
    if (!(json.contains("message") && json["message"] == NUTRONIX_NOT_FOUND) &&
        json.contains("foods") && json["foods"].is_array() && !json["foods"].empty())
    {
        nlohmann::json food = json["foods"][0];
        /* attach the thumbnail image if one is listed and it can be downloaded */
        if (food.contains("photo") && food["photo"].value("thumb", "") != "")
        {
            std::string url = food["photo"]["thumb"];
            sb::Log::log("adding image listed in Nutronix API at " + url);
            sb::Texture texture = texture_from_image_url(url);
            if (texture.generated())
            {
                item.texture(texture, url);
            }
        }
        item.brand_name(food.value("brand_name", ""));
        item.product_name(food.value("food_name", ""));
        save_item_json(json, item, "Nutronix_API");
    }
    else
    {
        sb::Log::log("no results from Nutronix");
    }
}
/* Submit a query to Edamam API and insert relevant results into supplied Item object */
void Pudding::incorporate_edamam_api(Item& item)
{
    sb::Log::log("checking Edamam API");
    /* build API url by concatenating relevant values into query string */
    std::stringstream url;
    url << "https://api.edamam.com/api/food-database/v2/parser?upc=" << item.upc() << "&app_id=" <<
        configuration()["api"]["edamam-app-id"].get<std::string>() << "&app_key=" <<
        configuration()["api"]["edamam-app-key"].get<std::string>();
    nlohmann::json json = json_from_url(url.str());
    /* test that should determine if a Edamam response has food data */
    if (json.contains("hints") && json["hints"][0].contains("food"))
    {
        nlohmann::json food = json["hints"][0]["food"];
        if (food.value("image", "") != "")
        {
            /* renamed from `url`, which shadowed the query string stream above */
            std::string image_url = food["image"];
            sb::Texture texture = texture_from_image_url(image_url);
            if (texture.generated())
            {
                item.texture(texture, image_url);
            }
        }
        /* fixed: set the product name whenever food data is present, not only when an image is
         * listed, matching the behavior of the other API integrations */
        item.product_name(food.value("label", ""));
        save_item_json(json, item, "Edamam_API");
    }
    else
    {
        sb::Log::log("no results from Edamam");
    }
}
/* Submit a query to the Best Buy API and insert relevant results into supplied Item object */
void Pudding::incorporate_best_buy_api(Item& item)
{
    sb::Log::log("checking Best Buy API");
    /* build API url by concatenating relevant values into query string */
    std::stringstream url;
    url << "https://api.bestbuy.com/v1/products(upc=" << item.upc() << ")?format=json&apiKey=" <<
        configuration()["api"]["best-buy-api-key"].get<std::string>();
    nlohmann::json json = json_from_url(url.str());
    /* test that should determine if a Best Buy response has a result */
    if (json.contains("total") && json["total"].get<int>() > 0)
    {
        nlohmann::json product = json["products"][0];
        /* look up image (for games this is box art) and "alternate views image" (for games this is a screen shot) */
        for (std::string key : {"alternateViewsImage", "image"})
        {
            if (product.value(key, "") != "")
            {
                /* renamed from `url`, which shadowed the query string stream above */
                std::string image_url = product[key];
                sb::Texture texture = texture_from_image_url(image_url);
                if (texture.generated())
                {
                    item.texture(texture, image_url);
                }
            }
        }
        item.product_name(product.value("name", ""));
        save_item_json(json, item, "Best_Buy_API");
    }
    else
    {
        sb::Log::log("no results from Best Buy");
    }
}
/* Look for item upc in the Google Books API and use the result to fill out item properties if found. */
void Pudding::incorporate_google_books_api(Item& item)
{
    sb::Log::log("checking Google Books API");
    nlohmann::json json = json_from_url(GOOGLE_BOOKS_API_URL + item.upc());
    /* a non-empty response reports a positive total and contains volume info for the first item */
    bool has_volume = json.value<int>("totalItems", 0) > 0 && json.contains("items") &&
        json["items"][0].contains("volumeInfo");
    if (!has_volume)
    {
        sb::Log::log("no results from Google Books API");
        return;
    }
    /* narrow to the book specific section of the JSON; note that only this section is saved below */
    json = json["items"][0]["volumeInfo"];
    /* get the image data */
    if (json.contains("imageLinks") && json["imageLinks"].value("thumbnail", "") != "")
    {
        std::string image_url = json["imageLinks"]["thumbnail"];
        sb::Texture texture = texture_from_image_url(image_url);
        if (texture.generated())
        {
            item.texture(texture, image_url);
        }
    }
    /* the first listed author stands in for the brand name */
    if (json.contains("authors"))
    {
        item.brand_name(json["authors"][0]);
    }
    item.product_name(json.value("title", ""));
    save_item_json(json, item, "Google_Books_API");
}
/* Write submitted JSON to file, creating parent directories if necessary, and using item and
 * api_name to determine file name prefix. Does nothing but warn when saving is disabled in the
 * global configuration. */
void Pudding::save_item_json(const nlohmann::json& json, const Item& item, const std::string& api_name) const
{
    if (configuration()["scan"]["json-save"])
    {
        fs::path path = configuration()["scan"]["json-save-directory"];
        if (!fs::exists(path))
        {
            fs::create_directories(path);
        }
        /* file name prefix is the API name plus the item name, or "Unknown" when the item has no name */
        std::string prefix = api_name;
        if (item.full_name() != "")
        {
            prefix += "_" + item.full_name();
        }
        else
        {
            prefix += "_Unknown";
        }
        /* sanitize the prefix by replacing every non-alphanumeric character with an underscore;
         * the cast to unsigned char is required because passing a negative char value to
         * std::isalnum is undefined behavior */
        std::replace_if(prefix.begin(), prefix.end(),
                        [](char c) { return !std::isalnum(static_cast<unsigned char>(c)); }, '_');
        path /= prefix + "_" + item.upc() + ".json";
        std::ofstream out(path);
        out << std::setw(4) << json << std::endl;
        sb::Log::log("Saved JSON to " + path.string());
    }
    else
    {
        SDL_LogWarn(SDL_LOG_CATEGORY_CUSTOM, "not saving JSON, saving disabled by configuration");
    }
}
/* Download the JSON data at the submitted URL, and return it as a JSON object.
 * Throws nlohmann::json::parse_error if the response is not valid JSON; the offending URL is
 * logged before the exception propagates so malformed responses can be traced. */
nlohmann::json Pudding::json_from_url(const std::string& url, const std::vector<std::string>& headers)
{
    std::vector<std::uint8_t> storage;
    web_get_bytes(url, storage, headers);
    nlohmann::json json;
    try
    {
        json = nlohmann::json::parse(storage);
    }
    catch (const nlohmann::json::parse_error& error)
    {
        sb::Log::log("failed to parse JSON from " + url + ": " + error.what());
        throw;
    }
    /* pretty-print the full response at debug level */
    std::stringstream json_formatted;
    json_formatted << std::setw(4) << json << std::endl;
    sb::Log::log(json_formatted.str(), sb::Log::DEBUG);
    return json;
}
/*!
* Store the bytes retrieved from `url` in the byte vector `storage`.
*
* The compiler will determine whether to use cURL or the Emscripten Fetch API to do the retrieval, depending on whether it is compiling for
* Emscripten.
*
* The optional `headers` parameter will be added to the request when using cURL, but not when using the Emscripten Fetch API.
*
* @param url URL containing data to be retrieved
* @param storage A reference to a vector of bytes which will be filled with the data retrieved from the URL
* @param headers A reference to a vector of strings that should be passed as headers with the request. It is only supported by the cURL version.
*/
void Pudding::web_get_bytes(const std::string& url, std::vector<std::uint8_t>& storage, const std::vector<std::string>& headers) const
{
#if defined(__EMSCRIPTEN__)
/* Create a fetch attributes object. Set a callback that will be called when response data is received. Pass along the user
* storage location to be filled by the callback. */
emscripten_fetch_attr_t attr;
emscripten_fetch_attr_init(&attr);
strcpy(attr.requestMethod, "GET");
attr.attributes = EMSCRIPTEN_FETCH_LOAD_TO_MEMORY;
attr.onsuccess = fetch_success;
attr.onerror = fetch_error;
attr.userData = &storage;
emscripten_fetch(&attr, url.c_str());
#else
CURL *curl;
CURLcode result;
result = curl_global_init(CURL_GLOBAL_DEFAULT);
if (result != CURLE_OK)
{
std::cout << "curl initialization failed " << curl_easy_strerror(result) << std::endl;
}
else
{
curl = curl_easy_init();
if (curl)
{
curl_easy_setopt(curl, CURLOPT_URL, url.c_str());
curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, Pudding::curl_write_response);
curl_easy_setopt(curl, CURLOPT_WRITEDATA, &storage);
curl_easy_setopt(curl, CURLOPT_USERAGENT, configuration()["api"]["user-agent"].get<std::string>().c_str());
struct curl_slist* list = nullptr;
if (headers.size() > 0)
{
for (const std::string& header : headers)
{
list = curl_slist_append(list, header.c_str());
}
}
curl_easy_setopt(curl, CURLOPT_HTTPHEADER, list);
result = curl_easy_perform(curl);
curl_slist_free_all(list);
if (result != CURLE_OK)
{
std::cout << "curl request failed " << curl_easy_strerror(result) << std::endl;
}
}
else
{
std::cout << "curl initialization failed" << std::endl;
}
curl_easy_cleanup(curl);
}
curl_global_cleanup();
#endif
}
#if defined(__EMSCRIPTEN__)
/*!
* This will be called automatically when request data is sucessfully fetched by `emscripten_fetch` from `Pudding::web_get_bytes`.
* Response bytes will be inserted into the user supplied `std::vector<std::uint8_t>&` at `fetch->userData`.
*/
void Pudding::fetch_success(emscripten_fetch_t* fetch)
{
    /* append the fetched bytes to the caller-supplied vector passed through userData */
    std::vector<std::uint8_t>* storage = reinterpret_cast<std::vector<std::uint8_t>*>(fetch->userData);
    storage->insert(storage->end(), fetch->data, fetch->data + fetch->numBytes);
    std::stringstream message;
    /* fixed: the size was previously divided by 100 while being reported as KB */
    message << "Stored " << (fetch->numBytes / 1024) << "KB of image data in memory from " << fetch->url;
    sb::Log::log(message.str());
    /* free the fetch handle (required after EMSCRIPTEN_FETCH_LOAD_TO_MEMORY) */
    emscripten_fetch_close(fetch);
}
/*!
* This will be called automatically when request data is not successfully fetched by `emscripten_fetch` from `Pudding::web_get_bytes`.
*/
void Pudding::fetch_error(emscripten_fetch_t* fetch)
{
    /* report the failed URL and HTTP status, then free the fetch handle */
    std::ostringstream message;
    message << "Downloading image from " << fetch->url << " failed with status code " << fetch->status;
    sb::Log::log(message.str());
    emscripten_fetch_close(fetch);
}
#else
/*!
* This will be called by cURL when it has received a buffer of data. The data will be inserted into the vector at `storage`
*
* @param buffer pointer to data
* @param size size in bytes of each value
* @param count number of values
* @param storage pointer to a vector of unsigned 8-bit values where the data will be copied to
* @return number of bytes copied
*/
size_t Pudding::curl_write_response(std::uint8_t* buffer, size_t size, size_t count, std::vector<std::uint8_t>* storage)
{
    /* append the whole buffer to the caller's byte vector and report how much was consumed */
    const size_t byte_count = size * count;
    storage->insert(storage->end(), buffer, buffer + byte_count);
    return byte_count;
}
#endif
/* Download the image at the given URL and return it as a Texture. If the download fails,
 * produces no data, or cannot be wrapped in an RW ops, the returned Texture is left ungenerated
 * (callers check texture.generated()). */
sb::Texture Pudding::texture_from_image_url(const std::string& url) const
{
    /* this texture will be returned whether we load pixels into it or not */
    sb::Texture texture;
    sb::Log::log("looking up image at " + url);
    std::vector<std::uint8_t> storage;
    web_get_bytes(url, storage);
    if (!storage.empty())
    {
        sb::Log::log("received image data", sb::Log::DEBUG);
        /* get a Texture by passing the bytes through an RW ops which will enable the Texture object to load a Surface */
        SDL_RWops* rw = SDL_RWFromConstMem(storage.data(), storage.size());
        if (rw != nullptr)
        {
            texture.load(rw);
            SDL_RWclose(rw);
        }
        else
        {
            /* fixed: SDL_RWFromConstMem can return null, which was previously passed to load/close */
            SDL_LogWarn(SDL_LOG_CATEGORY_CUSTOM, "could not create RWops from image data: %s", SDL_GetError());
        }
    }
    else
    {
        SDL_LogWarn(SDL_LOG_CATEGORY_CUSTOM, "image url returned no data");
    }
    return texture;
}
/* Call GL's delete texture function, and print a debug statement for testing. This is defined as a static member
 * function and uses the SDL logging function instead of the inherited logging functions from Node since the object
 * may not be allocated at destruction time (?) */
void Pudding::destroy_texture(GLuint* texture_id)
{
    /* not sure why SDL_Log works here but SDL_LogDebug and SDL_LogInfo don't */
    std::ostringstream log_text;
    log_text << "destroying texture ID " << *texture_id;
    sb::Log::log(log_text);
    glDeleteTextures(1, texture_id);
}
/* Return the item currently selected in the inventory.
 *
 * @return reference to the currently selected Item
 * @throws std::out_of_range if the inventory is empty (logged, then rethrown)
 */
Item& Pudding::current_item()
{
    try
    {
        return *item_carousel.current(items);
    }
    catch (const std::out_of_range& exception)
    {
        std::ostringstream message;
        message << "Out of range exception: " << exception.what() << " (Attempting to retrieve an item from empty inventory)";
        sb::Log::log(message);
        /* The original code fell off the end of this non-void function after logging, which is undefined
         * behavior; rethrow so the caller sees the error instead. */
        throw;
    }
}
/* Returns true when the item display toggle is on and the inventory holds at least one item. */
bool Pudding::item_display_active() const
{
    return !items.empty() && show_item;
}
/*!
 * Read pixels from the camera into a `cv::Mat`.
 *
 * For a Linux build: This function is meant to be launched in a separate thread, where it will run continuously. Set `new_frame_available`
 * to `false` before loading camera frame data into the `cv::Mat` object at `camera_frame`, then set it back to `true` to indicate new frame
 * data is available in `camera_frame`.
 *
 * For an Emscripten build: This will load pixel data off the Emscripten heap into a `cv::Mat`. It is intended to be called synchronously in the
 * main thread.
 */
void Pudding::capture_frame()
{
    /* Emscripten builds will call this function from the main thread, so don't run continuously */
#ifndef __EMSCRIPTEN__
    /* When the camera button is switched off, this thread will automatically finish execution. */
    while (camera_switch)
    {
        /* The frame data in the `cv::Mat` at `camera_frame` is about to be modified by the rest of
         * this function, so even if there is data stored there that hasn't been read yet, it should not
         * be read by the main thread until this is set to `true` at the end of the function. */
        new_frame_available = false;
        /* Load camera frame data into `cv::Mat`, timing the read for diagnostics */
        time_it("read frame")([&]{
            capture >> camera_frame;
        });
#else
    /* Convert the address of frame RGBA pixel data on the Emscripten heap into an unsigned 8-bit pointer and read the data
     * into a cv::Mat. The 320x240 dimensions must match the pixel buffer JavaScript writes to the heap. */
    std::uint8_t* emscripten_camera_pixels = reinterpret_cast<std::uint8_t*>(emscripten_heap_offset);
    camera_frame = cv::Mat(320, 240, CV_8UC4, emscripten_camera_pixels);
#endif
    /* Only advertise a new frame when the capture actually produced data */
    if (!camera_frame.empty())
    {
        /* Finished loading into `cv::Mat`, so it is new data that is safe to read. */
        new_frame_available = true;
    }
    sb::Log::gl_errors("in capture, after capturing frame");
#ifndef __EMSCRIPTEN__
    /* Throttle the capture loop so this thread doesn't spin at full speed */
    std::this_thread::sleep_for(std::chrono::milliseconds(50));
}
#endif
}
/* Update parameters and draw the screen */
void Pudding::update()
{
sb::Log::gl_errors("at beginning of update");
/* Time in seconds the game has running for */
float time_seconds = SDL_GetTicks() / 1000.0f;
/* if the config is set to refresh automatically, there may be a new barcode available */
if (current_config_barcode != configuration()["scan"]["barcode"])
{
current_config_barcode = configuration()["scan"]["barcode"];
current_barcode = current_config_barcode;
std::ostringstream message;
message << "read new barcode from config " << current_barcode;
sb::Log::log(message);
}
/* If new frame data is available, copy it from a cv::Mat into a texture, process for scanning and scan it. */
if (new_frame_available)
{
sb::Log::log("Hello, World!");
#ifdef __EMSCRIPTEN__
/* Emscripten builds load pixel data into cv::Mat synchronously */
capture_frame();
#endif
camera_view.texture().bind();
/* Fill camera view texture memory with last frame's pixels */
// camera_view.texture().load(camera_frame.ptr(), {camera_frame.cols, camera_frame.rows}, GL_BGR, GL_UNSIGNED_BYTE);
// std::cout << camera_frame.size[0] << " " << camera_frame.size[1] << std::endl;
camera_view.texture().load(camera_frame.ptr(), {320, 240}, GL_RGBA, GL_UNSIGNED_BYTE);
/* Frame data has been loaded, so there is not a new frame available anymore. */
new_frame_available = false;
/* Convert to grayscale for ZBar */
cv::cvtColor(camera_frame, camera_frame, cv::COLOR_BGR2GRAY);
if (configuration()["scan"]["enabled"])
{
zbar::Image query_image(camera_frame.cols, camera_frame.rows, "Y800", static_cast<void*>(camera_frame.data),
camera_frame.cols * camera_frame.rows);
int result = image_scanner.scan(query_image);
if (result > 0)
{
time_it("barcode lookup")([&] {
for (zbar::Image::SymbolIterator symbol = query_image.symbol_begin(); symbol != query_image.symbol_end(); ++symbol)
{
std::ostringstream message;
message << "camera scanned " << symbol->get_type_name() << " symbol " << symbol->get_data();
sb::Log::log(message);
current_camera_barcode = symbol->get_data();
current_barcode = current_camera_barcode;
}
});
}
query_image.set_data(nullptr, 0);
}
}
/* viewport box will be used to tell GL where to draw */
viewport = window_box(true);
/* shrink viewport if item texture or camera will be displayed */
if (item_display_active() || camera_switch)
{
viewport.drag_bottom(0.5f * configuration()["interface"]["pop-up-viewport-height"].get<float>() * viewport.height());
}
/* Save the main viewport dimensions */
main_viewport = viewport;
sb::Log::gl_errors("before viewport");
glViewport(viewport);
glDisable(GL_DEPTH_TEST);
glClearColor(0, 0, 0, 1);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
/* switch to flat shader for background */
glUseProgram(flat_program);
/* flat shader uniforms for BG: time, texture ID, disabled HSV blend, scroll on */
glActiveTexture(GL_TEXTURE0);
glUniform1f(uniform["flat"]["time"], time_seconds);
glUniform1i(uniform["flat"]["texture"], 0);
glUniform3f(uniform["flat"]["blend"], 0.0f, 0.0f, 1.0f);
glUniform1i(uniform["flat"]["scroll"], true);
glUniformMatrix4fv(uniform["flat"]["transformation"], 1, GL_FALSE, &glm::mat4(1)[0][0]);
/* disable pudding attributes and enable background attributes */
pudding_model.disable();
background.enable();
background.current().bind();
/* draws bg vertices and texture */
glDrawArrays(GL_TRIANGLES, 0, background.attributes("position")->count());
/* turn off scrolling */
glUniform1i(uniform["flat"]["scroll"], false);
sb::Log::gl_errors("after background, before pudding");
/* draw pudding model using MVP shader */
glUseProgram(mvp_program);
/* calculate the transformation matrix for displaying pudding in viewport */
model = glm::rotate(model, weight(configuration()["pudding"]["rotation-speed"].get<float>()), Y_UNIT_NORMAL_3D);
projection = glm::perspective(
glm::radians(40.0f * 1 / viewport.aspect()), viewport.aspect(), 0.1f, 100.0f);
mvp = projection * VIEW_MATRIX * model;
/* uniforms */
glUniform1f(uniform["mvp"]["time"], time_seconds);
glUniformMatrix4fv(uniform["mvp"]["mvp"], 1, GL_FALSE, &mvp[0][0]);
/* disable bg attributes and enable pudding attributes */
background.disable();
pudding_model.attributes("position")->enable();
if (items.size() == 0)
{
// glPolygonMode(GL_FRONT_AND_BACK, GL_LINE);
// pudding_model.attributes("color")->enable();
}
else
{
// glPolygonMode(GL_FRONT_AND_BACK, GL_FILL);
// pudding_model.attributes("color")->enable();
pudding_model.attributes("uv")->enable();
glUniform1i(uniform["mvp"]["pudding texture"], 0);
glActiveTexture(GL_TEXTURE0);
current_item().current_texture().bind();
}
/* draw pudding model */
glEnable(GL_DEPTH_TEST);
/* draw the sides of the pudding */
glDrawArrays(GL_TRIANGLES, 0, pudding_triangle_vertex_count);
sb::Log::gl_errors("after pudding sides, before pudding top/bottom");
/* enable squircling and draw the top and bottom of pudding */
glUniform1i(uniform["mvp"]["uv transformation"], UV_SQUIRCLE);
glUniform1f(uniform["mvp"]["coordinate bound"], configuration()["pudding"]["top-radius"]);
glDrawArrays(GL_TRIANGLE_FAN, pudding_triangle_vertex_count, pudding_fan_vertex_count);
glUniform1f(uniform["mvp"]["coordinate bound"], configuration()["pudding"]["base-radius"]);
glDrawArrays(GL_TRIANGLE_FAN, pudding_triangle_vertex_count + pudding_fan_vertex_count, pudding_fan_vertex_count);
/* disable squircling for all other drawing */
glUniform1i(uniform["mvp"]["uv transformation"], UV_NONE);
/* regular fill mode enabled for all other drawing */
// glPolygonMode(GL_FRONT_AND_BACK, GL_FILL);
sb::Log::gl_errors("after pudding, before item or camera view");
/* only do more drawing if items are downloaded or camera is enabled */
if (item_display_active() || camera_switch)
{
/* switch to flat shader for item and camera */
glUseProgram(flat_program);
pudding_model.disable();
glDisable(GL_DEPTH_TEST);
/* just need to set these once since we're drawing one texture per viewport */
glUniform1i(uniform["flat"]["texture"], 0);
glActiveTexture(GL_TEXTURE0);
/* move viewport to the bottom of screen */
viewport.top(viewport.bottom(), true);
viewport.bottom(window_box(true).bottom(), true);
/* reset blend to display the original texture colors */
glUniform3f(uniform["flat"]["blend"], 0.0f, 0.0f, 1.0f);
/* draw the current item image if we're supposed to */
if (item_display_active())
{
/* shrink viewport to half size if camera will also be displayed */
if (camera_switch)
{
viewport.left(viewport.cx(), true);
}
glViewport(viewport);
current_item().current_texture().bind();
current_item().view().enable();
/* draws rectangle vertices and rectangle texture using UV coords */
glDrawArrays(GL_TRIANGLES, 0, current_item().view().attributes("position")->count());
current_item().view().disable();
/* Draw arrows for cycling through items in inventory */
if (items.size() > 1 || current_item().texture_count() > 1)
{
next_button.draw(uniform["flat"]["transformation"]);
previous_button.draw(uniform["flat"]["transformation"]);
}
}
/* draw the camera view if the camera button has been switched on */
if (camera_switch)
{
viewport.left(window_box(true).left());
glViewport(viewport);
/* bind texture for drawing */
glUniformMatrix4fv(uniform["flat"]["transformation"], 1, GL_FALSE, &camera_view.transformation()[0][0]);
camera_view.texture().bind();
camera_view.enable();
/* draws rectangle vertices and rectangle texture using UV coords */
glDrawArrays(GL_TRIANGLES, 0, camera_view.attributes("position")->count());
}
}
else
{
/* Draw the camera button if neither the camera or inventory is displayed */
glUseProgram(flat_program);
camera_button.draw(uniform["flat"]["transformation"]);
/* And the inventory button if there are items scanned into the inventory */
if (items.size() > 0)
{
inventory_button.draw(uniform["flat"]["transformation"]);
}
}
SDL_GL_SwapWindow(window());
sb::Log::gl_errors("at end of update");
/* add a new item if a new barcode was scanned or entered */
if (current_barcode != previous_barcode)
{
add_item(current_barcode);
previous_barcode = current_barcode;
}
}
/* Construct a Pad: a clickable Plane that launches an arbitrary user function. `translation` positions the pad
 * relative to (0.0, 0.0), and `scale` with `ratio` sizes it relative to the Plane, whose opposite corners are at
 * (-1.0, -1.0) and (1.0, 1.0). `texture` is the graphic that displays in the pad's location. `on_connect` must be a
 * callable accepting no arguments and returning nothing. A non-zero `rotation` is applied around the pad's center. */
Pad::Pad(sb::Texture texture, glm::vec2 translation, float scale, float ratio, std::function<void()> on_connect, float rotation)
{
    this->texture(texture);
    this->translation(translation);
    this->scale(scale, ratio);
    /* skip the extra matrix rebuild when no rotation was requested */
    if (rotation != 0.0f)
    {
        this->rotation(rotation);
    }
    this->on_connect(on_connect);
    collision_box.invert_y(true);
}
/* Set the angle in radians the pad is rotated around its center. The collision box is left unchanged, so it only
 * bounds the entire pad when the angle is a multiple of pi/2. The transformation matrix is rebuilt immediately to
 * incorporate the new rotation. */
void Pad::rotation(float angle)
{
    rotation_angle = angle;
    transform();
}
/* Set the pad's scale from a factor and a ratio that skews the X and Y axes: a ratio above one divides the X scale by
 * the ratio, and a ratio below one multiplies the Y scale by it. Passing the window's aspect ratio therefore forces
 * the pad to display as a square, with the factor relative to the shorter axis. The collision box is scaled by the
 * same amounts, and the transformation matrix is rebuilt immediately to incorporate the new scale. */
void Pad::scale(float factor, float ratio)
{
    scale_factor = factor;
    scale_ratio = ratio;
    transform();
}
/* Move the pad in X and Y by setting a 2D translation vector. The collision box moves by the same amount, and the
 * transformation matrix is rebuilt immediately to incorporate the new translation. */
void Pad::translation(const glm::vec2& translation)
{
    translation_vector = translation;
    transform();
}
/* Set the transformation matrix for the pad object by applying the scale to the translation and the rotation to the
* resulting matrix, meaning the transformations will be applied to the pad in the order of: translate, scale, and
* rotate. The collision box will be scaled and moved to fit around the position coordinates that would result from
* applying this transformation to the position coordinates. */
void Pad::transform()
{
glm::vec3 scale { scale_factor, scale_factor, 1.0f };
if (scale_ratio > 1.0f)
{
scale.x /= scale_ratio;
}
else if (scale_ratio < 1.0f)
{
scale.y *= scale_ratio;
}
collision_box.size(2.0f * glm::vec2{scale.x, scale.y}, true);
collision_box.center(translation_vector);
Model::transformation(glm::translate(glm::vec3{translation_vector.x, translation_vector.y, 0.0f}) *
glm::scale(scale) * glm::rotate(rotation_angle, ROTATION_AXIS));
}
/* Set the function that will run when a pad object is clicked. The callable is a sink parameter taken by value, so it
 * is moved into the connection rather than copied again. */
void Pad::on_connect(std::function<void()> on_connect)
{
    connection.on_connect(std::move(on_connect));
}
/* Returns true when the point at `position` falls inside the pad's collision box. */
bool Pad::collide(const glm::vec2& position) const
{
    return collision_box.collide(position);
}
/* Upload the pad's transformation matrix to the given uniform location, bind its texture, and draw its vertices.
 * Vertex attributes are enabled only for the duration of the draw call. */
void Pad::draw(GLuint uniform_id)
{
    const glm::mat4& matrix = transformation();
    glUniformMatrix4fv(uniform_id, 1, GL_FALSE, &matrix[0][0]);
    texture().bind();
    enable();
    glDrawArrays(GL_TRIANGLES, 0, attributes("position")->count());
    disable();
}
/* Convenience overload of glViewport accepting a Box, unpacked into the lower-left corner plus width and height form
 * that GL expects. */
void glViewport(Box box)
{
    const auto corner_x = box.left();
    const auto corner_y = box.bottom();
    glViewport(corner_x, corner_y, box.width(), box.height());
}
#ifdef __EMSCRIPTEN__
/* This will bind the global functions to Emscripten so the camera pixel data can be transferred */
EMSCRIPTEN_BINDINGS(my_module)
{
    /* exposed to JS to mark that a new camera frame is available (sets `new_frame_available`) */
    function("flag_frame", &flag_frame);
    /* exposed to JS to record the heap address of the camera pixel buffer (sets `emscripten_heap_offset`) */
    function("set_heap_offset", &set_heap_offset);
}
#endif