For "training" reasons, i am trying to develop a simple graphic application (using SDL2 and directfb) for an arm board equipped with a touchscreen. The target is to draw a window with a red square to be used as button. App code is as follow:
Button/debug functions:
#include <SDL2/SDL.h>
#include <stdbool.h>
#include <stdio.h>

// Define a structure for the button
typedef struct {
    SDL_Rect draw_rect; // dimensions of the button
    struct {
        Uint8 r, g, b, a;
    } colour;
    bool pressed;
} button_t;

// Draw the button and report whether it was pressed
static bool button(SDL_Renderer* r, button_t* btn) {
    // draw the button rectangle in its colour
    SDL_SetRenderDrawColor(r, btn->colour.r, btn->colour.g, btn->colour.b, btn->colour.a);
    SDL_RenderFillRect(r, &btn->draw_rect);
    // if a button press was detected, reset it so it won't trigger twice
    if (btn->pressed) {
        btn->pressed = false;
        return true;
    }
    return false;
}
// Process mouse events for the button
static void button_process_event(button_t* btn, const SDL_Event* ev)
{
    // react to a left mouse click within the button rectangle by setting 'pressed'
    if (ev->type == SDL_MOUSEBUTTONDOWN) {
        if (ev->button.button == SDL_BUTTON_LEFT &&
            ev->button.x >= btn->draw_rect.x &&
            ev->button.x <= (btn->draw_rect.x + btn->draw_rect.w) &&
            ev->button.y >= btn->draw_rect.y &&
            ev->button.y <= (btn->draw_rect.y + btn->draw_rect.h)) {
            btn->pressed = true;
        }
    }
}
// Print the incoming event (for debugging)
static void print_event(const SDL_Event* e) {
    // print the event type
    if (e->type == SDL_FINGERDOWN) {
        printf("touch finger DOWN event\n");
    }
    else if (e->type == SDL_FINGERUP) {
        printf("touch finger UP event\n");
    }
    else if (e->type == SDL_FINGERMOTION) {
        printf("touch finger MOTION event\n");
    }
    else if (e->type == SDL_MOUSEMOTION) {
        printf("Mouse motion event\n");
        printf("mouse motion coords: %d,%d\n", e->motion.x, e->motion.y);
    }
    else if (e->type == SDL_MOUSEBUTTONDOWN) {
        printf("Mouse button down event\n");
        printf("button coords: %d,%d\n", e->button.x, e->button.y);
    }
    else if (e->type == SDL_MOUSEBUTTONUP) {
        printf("Mouse button up event\n");
        printf("button coords: %d,%d\n", e->button.x, e->button.y);
    }
    else if (e->type == SDL_MOUSEWHEEL) {
        printf("Mouse wheel event\n");
    }
    else {
        printf("unknown event %d\n", e->type);
    }
}
int main(int argc, char* argv[])
{
    // Initialize the SDL2 video subsystem
    SDL_Init(SDL_INIT_VIDEO);
    // Create a window with a title, position, and size
    SDL_Window* window = SDL_CreateWindow("Empty Window", SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED, 320, 240, SDL_WINDOW_SHOWN | SDL_WINDOW_RESIZABLE | SDL_WINDOW_FULLSCREEN);
    // Create a renderer for the window
    SDL_Renderer* renderer = SDL_CreateRenderer(window, -1, 0);
    // Set the renderer's draw color
    SDL_SetRenderDrawColor(renderer, 0, 0, 0, 255); // black
    // Clear the renderer with the draw color
    SDL_RenderClear(renderer);
    // Create an instance of the button structure
    button_t start_button = {
        .colour = {.r = 255, .g = 10, .b = 10, .a = 255},    // red
        .draw_rect = {.x = 100, .y = 0, .w = 100, .h = 100}, // horizontally centred, at the top of the window
    };
then the main loop:
    // Main loop
    SDL_Event event;
    bool quit = false;
    while (!quit) {
        // Poll events
        while (SDL_PollEvent(&event)) {
            // Quit on close, window close, or 'escape' key hit
            if (event.type == SDL_QUIT ||
                (event.type == SDL_WINDOWEVENT && event.window.event == SDL_WINDOWEVENT_CLOSE) ||
                (event.type == SDL_KEYDOWN && event.key.keysym.sym == SDLK_ESCAPE)) {
                quit = true;
            }
            // Pass event to button
            button_process_event(&start_button, &event);
            // debug event
            print_event(&event);
        }
        // Clear the renderer
        SDL_SetRenderDrawColor(renderer, 0, 0, 0, 255); // black
        SDL_RenderClear(renderer);
        // Draw the button
        if (button(renderer, &start_button)) {
            // Handle the button click event
            printf("Start button pressed\n");
        }
        // Present the renderer
        SDL_RenderPresent(renderer);
    }
    // Destroy the renderer and the window
    SDL_DestroyRenderer(renderer);
    SDL_DestroyWindow(window);
    // Quit the SDL2 video subsystem
    SDL_Quit();
    return 0;
}
As expected, the code above produces a black window with a red square button; however, I observe some strange behaviours:
1. When I first touch the screen (anywhere on the screen), the following strings are printed to stdout: "Mouse button down event / button coords: 0,0 / Mouse button up event / button coords: 0,0". It seems that the "focused" screen position stays the same until I drag the finger to some other location (without motion events, the focused area does not change);
2. The events generated on finger touch/motion are only mouse events: mouse button down + mouse button up, or mouse motion (I tried the mouse-event-related hints without success; roughly the ones shown in the sketch after this list);
3. Consistently with what is described in 1., to make the program print "Start button pressed" (which should be printed only when the red square button is pressed) I have to drag the finger into the button area first (producing motion events in that area); after that, the program prints "Start button pressed" for every touch of the screen (independently of the area touched).
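For reference, by "mouse events related hints" I mean the SDL2 hints that control mouse/touch event synthesis; roughly what I tried looked like the sketch below (the helper name is just for illustration, and the exact values I experimented with may have differed). The hints were set before SDL_Init(), but they had no visible effect on the directfb backend:

#include <SDL2/SDL.h>

// illustrative helper, called right before SDL_Init(SDL_INIT_VIDEO)
static void set_touch_hints(void)
{
    // do not synthesize mouse events from touch events
    SDL_SetHint(SDL_HINT_TOUCH_MOUSE_EVENTS, "0");
    // synthesize touch events from mouse events (available since SDL 2.0.10)
    SDL_SetHint(SDL_HINT_MOUSE_TOUCH_EVENTS, "1");
}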
Does anyone have any suggestions for getting my app to work “normally”?
The problem was using DirectFB with libinput as the underlying "driver", which for some reason was interpreting the touchscreen device as a pure mouse device. By recompiling DirectFB with the option --enable-tslib and exporting the environment variable TSLIB_TSDEVICE=/dev/input/event0 (in my case this was the touchscreen event device), the problem was solved.
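For completeness, here is a minimal sketch of doing the same configuration from inside the program instead of the shell (this assumes DirectFB was already recompiled with --enable-tslib and that the SDL2 build includes the directfb video backend; the device path /dev/input/event0 and the error handling are just illustrative):

#include <stdio.h>
#include <stdlib.h>
#include <SDL2/SDL.h>

int main(void)
{
    // setting the variables before SDL_Init should have the same effect
    // as exporting them in the shell before launching the app
    setenv("SDL_VIDEODRIVER", "directfb", 1);         // select the directfb video backend
    setenv("TSLIB_TSDEVICE", "/dev/input/event0", 1); // touchscreen device read by tslib
    if (SDL_Init(SDL_INIT_VIDEO) != 0) {
        fprintf(stderr, "SDL_Init failed: %s\n", SDL_GetError());
        return 1;
    }
    // ... create the window/renderer and run the main loop as in the question ...
    SDL_Quit();
    return 0;
}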