Scene Tutorial

GestureWorks supports scene mode. In this mode the application defines a set of touch objects, passes touch points to GestureWorks, performs hit testing to associate each touch point with a touch object, reports those associations back to GestureWorks, and then updates GestureWorks each frame and retrieves the resulting gestures.

Full code for this sample is at GestureWorks\C++\gestureworks-demo-scene\.

1 - Include the header “GestureWorks2.h” and link to the appropriate C++ DLL import library. GestureWorks is built with Visual Studio and ships 32-bit and 64-bit DLLs in both debug and release configurations.
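
For example, with Visual Studio the include and link step could look like the sketch below. The import library name is an assumption; use whichever .lib file ships with your GestureWorks package for your platform and configuration.

#include "GestureWorks2.h"   // 1 - GestureWorks header

// Link the matching import library. The name below is an assumption; substitute
// the 32/64-bit, debug/release .lib that ships with your GestureWorks package.
#pragma comment(lib, "GestureWorks2.lib")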

2 - Declare a variable to hold GestureWorks:

static GestureWorks *gesture_works = nullptr;

This will hold the instance of GestureWorks for this scene.

3 - Initialize GestureWorks. Here we do a basic initialization, passing in the screen width and height:

gesture_works = initializeGestureWorks(width, height);
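
If initialization can fail (for example, when the DLL cannot be loaded), it is worth guarding the returned pointer before making any further calls. This sketch assumes a null return signals failure; check the GestureWorks documentation for the actual error behavior. It uses std::cerr, so it needs <iostream>, which the sample already includes.

gesture_works = initializeGestureWorks(width, height);
if (!gesture_works)
{
	// Assumption: a null pointer means initialization failed.
	std::cerr << "Failed to initialize GestureWorks" << std::endl;
	// Handle the error here, before making any other GestureWorks calls.
}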

4 - Register a touch object for GestureWorks. In a larger application there could be many different touch objects registered. For this simple example we are only registering one object.

registerTouchObject(gesture_works, s_test_object_name);
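
In a multi-object scene each object is registered by name in the same way. The object names below are hypothetical and only illustrate the pattern:

registerTouchObject(gesture_works, "PhotoA");         // hypothetical object
registerTouchObject(gesture_works, "PhotoB");         // hypothetical object
registerTouchObject(gesture_works, "MapBackground");  // hypothetical object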

5 - Register additional gestures for the touch object. Here we add drag, rotate, and scale to the touch object. If we don't specify a custom GML file, a basic set of gestures is available to associate with each touch object. These gestures are documented here: Default Gestures.

addGesture(gesture_works, s_test_object_name, "ndrag");
addGesture(gesture_works, s_test_object_name, "nrotate");
addGesture(gesture_works, s_test_object_name, "nscale");

6 - Similar to the screen tutorial, touch information is passed to GestureWorks:

	switch (uMsg)
	{
	case WM_POINTERDOWN:
	{
		POINTER_INFO pi;
		GetPointerInfo(GET_POINTERID_WPARAM(wParam), &pi);

		addTouchEvent(gesture_works, pi.pointerId, GW_TOUCHADDED, (float)pi.ptPixelLocation.x, (float)pi.ptPixelLocation.y);

		break;
	}
	case WM_POINTERUP:
	{
		POINTER_INFO pi;
		GetPointerInfo(GET_POINTERID_WPARAM(wParam), &pi);

		addTouchEvent(gesture_works, pi.pointerId, GW_TOUCHREMOVED, (float)pi.ptPixelLocation.x, (float)pi.ptPixelLocation.y);

		break;
	}
	case WM_POINTERUPDATE:
	{
		POINTER_INFO pi;
		GetPointerInfo(GET_POINTERID_WPARAM(wParam), &pi);

		addTouchEvent(gesture_works, pi.pointerId, GW_TOUCHUPDATE, (float)pi.ptPixelLocation.x, (float)pi.ptPixelLocation.y);

		break;
	}
	}

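To exercise this handler on a machine without a touch screen, Windows 8 and later can deliver mouse input through the same WM_POINTER messages. This is plain Win32 behavior rather than part of GestureWorks; call it once at startup, before any pointer input arrives:

	// Optional: route mouse input through WM_POINTER as well, so the cases
	// above can be tested with a mouse. Requires Windows 8 or later.
	EnableMouseInPointer(TRUE);
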
7 - Read touch points. Here we take the current points GestureWorks is processing. If we had more than one touch object, we would decide, usually through a collision-detection hit test, which object each point belongs to, and then pass that object's id to addTouchPointToObject to associate the touch point with it (a sketch of such a hit test follows the code below).

		PointInfo *points;
		const int point_count = getPointEvents(gesture_works, &points);

		for (int i = 0; i != point_count; ++i)
		{
			PointInfo point = points[i];

			switch (point.status)
			{
			case GW_TOUCHADDED:
				addTouchPointToObject(gesture_works, s_test_object_name, point.point_id);
				break;

			case GW_TOUCHUPDATE:
				break;

			case GW_TOUCHREMOVED:
				break;
			}
		}
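
For the multi-object case, the GW_TOUCHADDED branch is where the hit test belongs. The sketch below is illustration only: the rectangle struct, the scene data, and the helper are hypothetical and not part of the GestureWorks API; only addTouchPointToObject is. The point's screen coordinates come from wherever your application tracks them (for example, cached from the WM_POINTER messages in step 6).

	// Hypothetical scene description: one axis-aligned rectangle per touch object.
	// None of this is GestureWorks API; only addTouchPointToObject below is.
	struct SceneRect { const char *object_name; float x, y, w, h; };

	static const SceneRect s_scene[] = {
		{ "PhotoA", 100.0f, 100.0f, 400.0f, 300.0f },
		{ "PhotoB", 600.0f, 250.0f, 400.0f, 300.0f },
	};

	// Return the name of the first object containing the point, or nullptr if it
	// hits empty space. List front-most objects first so they win the hit test.
	static const char *HitTestScene(float px, float py)
	{
		for (const SceneRect &r : s_scene)
		{
			if (px >= r.x && px <= r.x + r.w && py >= r.y && py <= r.y + r.h)
				return r.object_name;
		}
		return nullptr;
	}

	// ... inside the GW_TOUCHADDED case, using the point's screen coordinates
	// (touch_x / touch_y are placeholders for however the application tracks them):
	const char *hit = HitTestScene(touch_x, touch_y);
	if (hit)
		addTouchPointToObject(gesture_works, hit, point.point_id);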

8 - Update the frame for GestureWorks.

updateFrame(gesture_works);

9 - Read gestures. Each gesture is associated with a specific touch object. If we had more than one touch object registered, we could look at event.target to see which object the gesture belongs to (see the sketch after the code below).

	GestureInfo *gesture_info;
	const int gesture_count = getGestureEvents(gesture_works, &gesture_info);
	for (int i = 0; i != gesture_count; ++i)
	{
		GestureInfo event = gesture_info[i];

		std::cout << "Reading Gesture for Touch object " << event.target << std::endl;

		if (strcmp("drag", event.gesture_type) == 0)
		{
			s_image_x += event.getValue("drag_dx") * s_window_width;
			s_image_y += event.getValue("drag_dy") * s_window_height;
		}
		else if (strcmp("rotate", event.gesture_type) == 0)
		{
			s_image_rotation += event.getValue("rotate_dtheta");
		}
		else if (strcmp("scale", event.gesture_type) == 0)
		{
			s_image_scale += event.getValue("scale_dsx") * s_window_width;
		}
	}
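
With several objects registered, event.target selects which object's state to update. Below is a minimal sketch under two assumptions: that event.target can be used as a string key, and that a hypothetical per-object transform map replaces the single set of s_image_* variables; the gesture value names are the same as above.

	// Hypothetical per-object transform state, keyed by touch object name
	// (requires <map> and <string>; not part of GestureWorks).
	struct ObjectTransform { float x, y, rotation, scale; };
	static std::map<std::string, ObjectTransform> s_objects;

	GestureInfo *gesture_info;
	const int gesture_count = getGestureEvents(gesture_works, &gesture_info);
	for (int i = 0; i != gesture_count; ++i)
	{
		GestureInfo event = gesture_info[i];

		// Look up the state for whichever object this gesture targets.
		ObjectTransform &obj = s_objects[event.target];

		if (strcmp("drag", event.gesture_type) == 0)
		{
			obj.x += event.getValue("drag_dx") * s_window_width;
			obj.y += event.getValue("drag_dy") * s_window_height;
		}
		else if (strcmp("rotate", event.gesture_type) == 0)
		{
			obj.rotation += event.getValue("rotate_dtheta");
		}
		else if (strcmp("scale", event.gesture_type) == 0)
		{
			obj.scale += event.getValue("scale_dsx") * s_window_width;
		}
	}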

10 - Destroy GestureWorks as the application closes. This is done in WndProc when handling WM_DESTROY:

	case WM_DESTROY:
		destroyGestureWorks(gesture_works);

Final Code
The final code is at GestureWorks2\Samples\C++\gestureworks-demo-scene\src\main.cpp. Here is the complete code:

#include <Windows.h>
#include <iostream>
#include "render.h"

// 1 - Link library, header
#include "GestureWorks2.h"

// 2 - Declare GestureWorks
static GestureWorks *gesture_works = nullptr;

// Some variables to display our test image
static int s_window_width, s_window_height;
static float s_image_x, s_image_y;
static float s_image_rotation;
static float s_image_scale;

// Define a name for our test object
static const char *s_test_object_name = "TestObject";

HWND CreateAdjustedWindow(const char *windowTitle, WNDPROC wndProc, bool windowed = false, int width = CW_USEDEFAULT, int height = CW_USEDEFAULT, int x = CW_USEDEFAULT, int y = CW_USEDEFAULT)
{
	WNDCLASSEXA wc;
	ZeroMemory(&wc, sizeof(wc));
	wc.style = CS_OWNDC;
	wc.cbSize = sizeof(WNDCLASSEX);
	wc.lpfnWndProc = wndProc;
	wc.hInstance = GetModuleHandle(0);
	wc.hCursor = LoadCursor(NULL, IDC_ARROW);
	wc.lpszClassName = windowTitle;
	RegisterClassExA(&wc);

	RECT windowRect;
	DWORD style;
	if (windowed)
	{
		style = WS_OVERLAPPED;
		windowRect = { 0, 0, width, height };
		AdjustWindowRectEx(&windowRect, style, FALSE, 0);
	}
	else
	{
		style = WS_POPUPWINDOW ^ WS_BORDER;
		HWND desktop = GetDesktopWindow();
		GetWindowRect(desktop, &windowRect);
		x = 0;
		y = 0;
	}

	width = windowRect.right - windowRect.left;
	height = windowRect.bottom - windowRect.top;
	HWND hWnd = CreateWindowExA(0, wc.lpszClassName, windowTitle, style, x, y, width, height, NULL, NULL, wc.hInstance, NULL);
	ShowWindow(hWnd, SW_NORMAL);
	s_window_width = width;
	s_window_height = height;

	// 3 - Initialize GestureWorks
	gesture_works = initializeGestureWorks(width, height);

	// 4 - Register touch object
	registerTouchObject(gesture_works, s_test_object_name);

	// 5 - Register additional gestures
	addGesture(gesture_works, s_test_object_name, "ndrag");
	addGesture(gesture_works, s_test_object_name, "nrotate");
	addGesture(gesture_works, s_test_object_name, "nscale");

	return hWnd;
}

void PumpEvents()
{
	MSG msg;
	while (PeekMessageA(&msg, NULL, 0, 0, PM_REMOVE))
	{
		TranslateMessage(&msg);
		DispatchMessage(&msg);
	}
}

static bool g_running = true;
// 6 - Pass input to GestureWorks
static LRESULT CALLBACK WndProc(HWND hWnd, UINT uMsg, WPARAM wParam, LPARAM lParam)
{
	switch (uMsg)
	{
	case WM_POINTERDOWN:
	{
		POINTER_INFO pi;
		GetPointerInfo(GET_POINTERID_WPARAM(wParam), &pi);

		addTouchEvent(gesture_works, pi.pointerId, GW_TOUCHADDED, (float)pi.ptPixelLocation.x, (float)pi.ptPixelLocation.y);

		break;
	}
	case WM_POINTERUP:
	{
		POINTER_INFO pi;
		GetPointerInfo(GET_POINTERID_WPARAM(wParam), &pi);

		addTouchEvent(gesture_works, pi.pointerId, GW_TOUCHREMOVED, (float)pi.ptPixelLocation.x, (float)pi.ptPixelLocation.y);

		break;
	}
	case WM_POINTERUPDATE:
	{
		POINTER_INFO pi;
		GetPointerInfo(GET_POINTERID_WPARAM(wParam), &pi);

		addTouchEvent(gesture_works, pi.pointerId, GW_TOUCHUPDATE, (float)pi.ptPixelLocation.x, (float)pi.ptPixelLocation.y);

		break;
	}

	case WM_CREATE:
		Re_Init(hWnd);
		break;
	case WM_DESTROY:
		// 10 - Destroy GestureWorks
		destroyGestureWorks(gesture_works);
		g_running = false;
		break;
	case WM_KEYDOWN:
		if (wParam == VK_ESCAPE)
			g_running = false;
		break;
	case WM_SIZE:
		Re_SizeToClientArea();
		break;
	}
	return DefWindowProc(hWnd, uMsg, wParam, lParam);
}

int CALLBACK WinMain(HINSTANCE hInstance, HINSTANCE hPrevInstance, LPSTR lpCmdLine, int nCmdShow)
{
	HWND hWnd = CreateAdjustedWindow("GestureWorks Demo", WndProc, false);

	RECT clientRect;
	GetClientRect(hWnd, &clientRect);

	Re_Image *testImage = Re_LoadImage(L"GestureWorks.png");

	float screenCenterX = clientRect.right / 2.0f;
	float screenCenterY = clientRect.bottom / 2.0f;
	s_image_x = clientRect.right / 2.0f - testImage->width / 2.0f;
	s_image_y = clientRect.bottom / 2.0f - testImage->height / 2.0f;
	s_image_rotation = 0.0f;
	s_image_scale = 1.0f;

	float angle = 0;
	while (g_running)
	{
		PumpEvents();

		Re_ClearScreen(0.52f, 0.80f, 0.98f, 0);

		Re_BeginDraw();
		Re_Identity();

		// 7 - Read touch points. If we had more than one touch object we would decide,
		// usually through a collision-detection hit test, which object each point belongs to,
		// and pass that object's id to addTouchPointToObject.
		PointInfo *points;
		const int point_count = getPointEvents(gesture_works, &points);

		for (int i = 0; i != point_count; ++i)
		{
			PointInfo point = points[i];

			switch (point.status)
			{
			case GW_TOUCHADDED:
				addTouchPointToObject(gesture_works, s_test_object_name, point.point_id);
				break;

			case GW_TOUCHUPDATE:
				break;

			case GW_TOUCHREMOVED:
				break;
			}
		}

		// 8 - Update the frame for GestureWorks 
		updateFrame(gesture_works);


		// 9 - Read gestures; each gesture is associated with a specific touch object
		GestureInfo *gesture_info;
		const int gesture_count = getGestureEvents(gesture_works, &gesture_info);
		for (int i = 0; i != gesture_count; ++i)
		{
			GestureInfo event = gesture_info[i];

			std::cout << "Reading Gesture for Touch object " << event.target << std::endl;

			if (strcmp("drag", event.gesture_type) == 0)
			{
				s_image_x += event.getValue("drag_dx") * s_window_width;
				s_image_y += event.getValue("drag_dy") * s_window_height;
			}
			else if (strcmp("rotate", event.gesture_type) == 0)
			{
				s_image_rotation += event.getValue("rotate_dtheta");
			}
			else if (strcmp("scale", event.gesture_type) == 0)
			{
				s_image_scale += event.getValue("scale_dsx") * s_window_width;
				s_image_scale = s_image_scale > 4.0f ? 4.0f : s_image_scale;
				s_image_scale = s_image_scale < 0.5f ? 0.5f : s_image_scale;
			}
		}

		Re_Scale(s_image_scale, s_image_scale, s_image_x + testImage->width / 2.0f, s_image_y + testImage->height / 2.0f);
		Re_Rotate(s_image_rotation, s_image_x + testImage->width / 2.0f, s_image_y + testImage->height / 2.0f);
		Re_DrawImage(testImage, s_image_x, s_image_y);

		Re_EndDraw();

		// waits for vsync
		Re_PresentBackBuffer();
	}

	Re_ReleaseImage(testImage);
	Re_Cleanup();

	return 0;
}
