
Developing Android Apps Without Java (NativeActivity): Can You Do It?
Source: Internet | Published: 2016-01-14


With the latest Android 2.3 you can develop an application without writing any Java at all. Below is the official sample program; what are you waiting for? Give it a try and see how you measure up.

1. [Code] AndroidManifest.xml

<manifest xmlns:android="http://schemas.android.com/apk/res/android"
        package="com.example.native_activity"
        android:versionCode="1"
        android:versionName="1.0">

    <!-- This is the platform API where NativeActivity was introduced. -->
    <uses-sdk android:minSdkVersion="9" />

    <!-- This .apk has no Java code itself, so set hasCode to false. -->
    <application android:label="@string/app_name" android:hasCode="false">

        <!-- Our activity is the built-in NativeActivity framework class.
             This will take care of integrating with our NDK code. -->
        <activity android:name="android.app.NativeActivity"
                android:label="@string/app_name"
                android:configChanges="orientation|keyboardHidden">

            <!-- Tell NativeActivity the name of our .so -->
            <meta-data android:name="android.app.lib_name"
                    android:value="native-activity" />

            <intent-filter>
                <action android:name="android.intent.action.MAIN" />
                <category android:name="android.intent.category.LAUNCHER" />
            </intent-filter>
        </activity>
    </application>
</manifest>
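Note that android:hasCode="false" only means the APK ships no Java/Dalvik code: the framework's built-in android.app.NativeActivity class loads the shared library named by the android.app.lib_name meta-data (here "native-activity", i.e. libnative-activity.so produced by the NDK build). The manifest still refers to @string/app_name, so the project needs an ordinary string resource. A minimal res/values/strings.xml could look like the sketch below; the label text is just a placeholder, not part of the official sample.

<?xml version="1.0" encoding="utf-8"?>
<resources>
    <!-- App label referenced by @string/app_name in the manifest; the text is a placeholder. -->
    <string name="app_name">NativeActivity</string>
</resources>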

2. [Code] Demo.c

#include <jni.h>
#include <errno.h>

#include <stdlib.h>   /* malloc() for the saved state */
#include <string.h>   /* memset() */

#include <EGL/egl.h>
#include <GLES/gl.h>

#include <android/sensor.h>
#include <android/log.h>
#include <android_native_app_glue.h>

#define LOGI(...) ((void)__android_log_print(ANDROID_LOG_INFO, "native-activity", __VA_ARGS__))
#define LOGW(...) ((void)__android_log_print(ANDROID_LOG_WARN, "native-activity", __VA_ARGS__))

/**
 * Our saved state data.
 */
struct saved_state {
    float angle;
    int32_t x;
    int32_t y;
};

/**
 * Shared state for our app.
 */
struct engine {
    struct android_app* app;

    ASensorManager* sensorManager;
    const ASensor* accelerometerSensor;
    ASensorEventQueue* sensorEventQueue;

    int animating;
    EGLDisplay display;
    EGLSurface surface;
    EGLContext context;
    int32_t width;
    int32_t height;
    struct saved_state state;
};

/**
 * Initialize an EGL context for the current display.
 */
static int engine_init_display(struct engine* engine) {
    // initialize OpenGL ES and EGL

    /*
     * Here specify the attributes of the desired configuration.
     * Below, we select an EGLConfig with at least 8 bits per color
     * component compatible with on-screen windows
     */
    const EGLint attribs[] = {
            EGL_SURFACE_TYPE, EGL_WINDOW_BIT,
            EGL_BLUE_SIZE, 8,
            EGL_GREEN_SIZE, 8,
            EGL_RED_SIZE, 8,
            EGL_NONE
    };
    EGLint w, h, dummy, format;
    EGLint numConfigs;
    EGLConfig config;
    EGLSurface surface;
    EGLContext context;

    EGLDisplay display = eglGetDisplay(EGL_DEFAULT_DISPLAY);

    eglInitialize(display, 0, 0);

    /* Here, the application chooses the configuration it desires. In this
     * sample, we have a very simplified selection process, where we pick
     * the first EGLConfig that matches our criteria */
    eglChooseConfig(display, attribs, &config, 1, &numConfigs);

    /* EGL_NATIVE_VISUAL_ID is an attribute of the EGLConfig that is
     * guaranteed to be accepted by ANativeWindow_setBuffersGeometry().
     * As soon as we picked a EGLConfig, we can safely reconfigure the
     * ANativeWindow buffers to match, using EGL_NATIVE_VISUAL_ID. */
    eglGetConfigAttrib(display, config, EGL_NATIVE_VISUAL_ID, &format);

    ANativeWindow_setBuffersGeometry(engine->app->window, 0, 0, format);

    surface = eglCreateWindowSurface(display, config, engine->app->window, NULL);
    context = eglCreateContext(display, config, NULL, NULL);

    if (eglMakeCurrent(display, surface, surface, context) == EGL_FALSE) {
        LOGW("Unable to eglMakeCurrent");
        return -1;
    }

    eglQuerySurface(display, surface, EGL_WIDTH, &w);
    eglQuerySurface(display, surface, EGL_HEIGHT, &h);

    engine->display = display;
    engine->context = context;
    engine->surface = surface;
    engine->width = w;
    engine->height = h;
    engine->state.angle = 0;

    // Initialize GL state.
    glHint(GL_PERSPECTIVE_CORRECTION_HINT, GL_FASTEST);
    glEnable(GL_CULL_FACE);
    glShadeModel(GL_SMOOTH);
    glDisable(GL_DEPTH_TEST);

    return 0;
}

/**
 * Just draw the current frame in the display.
 */
static void engine_draw_frame(struct engine* engine) {
    if (engine->display == NULL) {
        // No display.
        return;
    }

    // Just fill the screen with a color.
    glClearColor(((float)engine->state.x)/engine->width, engine->state.angle,
            ((float)engine->state.y)/engine->height, 1);
    glClear(GL_COLOR_BUFFER_BIT);

    eglSwapBuffers(engine->display, engine->surface);
}

/**
 * Tear down the EGL context currently associated with the display.
 */
static void engine_term_display(struct engine* engine) {
    if (engine->display != EGL_NO_DISPLAY) {
        eglMakeCurrent(engine->display, EGL_NO_SURFACE, EGL_NO_SURFACE, EGL_NO_CONTEXT);
        if (engine->context != EGL_NO_CONTEXT) {
            eglDestroyContext(engine->display, engine->context);
        }
        if (engine->surface != EGL_NO_SURFACE) {
            eglDestroySurface(engine->display, engine->surface);
        }
        eglTerminate(engine->display);
    }
    engine->animating = 0;
    engine->display = EGL_NO_DISPLAY;
    engine->context = EGL_NO_CONTEXT;
    engine->surface = EGL_NO_SURFACE;
}

/**
 * Process the next input event.
 */
static int32_t engine_handle_input(struct android_app* app, AInputEvent* event) {
    struct engine* engine = (struct engine*)app->userData;
    if (AInputEvent_getType(event) == AINPUT_EVENT_TYPE_MOTION) {
        engine->animating = 1;
        engine->state.x = AMotionEvent_getX(event, 0);
        engine->state.y = AMotionEvent_getY(event, 0);
        return 1;
    }
    return 0;
}

/**
 * Process the next main command.
 */
static void engine_handle_cmd(struct android_app* app, int32_t cmd) {
    struct engine* engine = (struct engine*)app->userData;
    switch (cmd) {
        case APP_CMD_SAVE_STATE:
            // The system has asked us to save our current state. Do so.
            engine->app->savedState = malloc(sizeof(struct saved_state));
            *((struct saved_state*)engine->app->savedState) = engine->state;
            engine->app->savedStateSize = sizeof(struct saved_state);
            break;
        case APP_CMD_INIT_WINDOW:
            // The window is being shown, get it ready.
            if (engine->app->window != NULL) {
                engine_init_display(engine);
                engine_draw_frame(engine);
            }
            break;
        case APP_CMD_TERM_WINDOW:
            // The window is being hidden or closed, clean it up.
            engine_term_display(engine);
            break;
        case APP_CMD_GAINED_FOCUS:
            // When our app gains focus, we start monitoring the accelerometer.
            if (engine->accelerometerSensor != NULL) {
                ASensorEventQueue_enableSensor(engine->sensorEventQueue,
                        engine->accelerometerSensor);
                // We'd like to get 60 events per second (in us).
                ASensorEventQueue_setEventRate(engine->sensorEventQueue,
                        engine->accelerometerSensor, (1000L/60)*1000);
            }
            break;
        case APP_CMD_LOST_FOCUS:
            // When our app loses focus, we stop monitoring the accelerometer.
            // This is to avoid consuming battery while not being used.
            if (engine->accelerometerSensor != NULL) {
                ASensorEventQueue_disableSensor(engine->sensorEventQueue,
                        engine->accelerometerSensor);
            }
            // Also stop animating.
            engine->animating = 0;
            engine_draw_frame(engine);
            break;
    }
}

/**
 * This is the main entry point of a native application that is using
 * android_native_app_glue. It runs in its own thread, with its own
 * event loop for receiving input events and doing other things.
 */
void android_main(struct android_app* state) {
    struct engine engine;

    // Make sure glue isn't stripped.
    app_dummy();

    memset(&engine, 0, sizeof(engine));
    state->userData = &engine;
    state->onAppCmd = engine_handle_cmd;
    state->onInputEvent = engine_handle_input;
    engine.app = state;

    // Prepare to monitor accelerometer
    engine.sensorManager = ASensorManager_getInstance();
    engine.accelerometerSensor = ASensorManager_getDefaultSensor(engine.sensorManager,
            ASENSOR_TYPE_ACCELEROMETER);
    engine.sensorEventQueue = ASensorManager_createEventQueue(engine.sensorManager,
            state->looper, LOOPER_ID_USER, NULL, NULL);

    if (state->savedState != NULL) {
        // We are starting with a previous saved state; restore from it.
        engine.state = *(struct saved_state*)state->savedState;
    }

    // loop waiting for stuff to do.
    while (1) {
        // Read all pending events.
        int ident;
        int events;
        struct android_poll_source* source;

        // If not animating, we will block forever waiting for events.
        // If animating, we loop until all events are read, then continue
        // to draw the next frame of animation.
        while ((ident=ALooper_pollAll(engine.animating ? 0 : -1, NULL, &events,
                (void**)&source)) >= 0) {

            // Process this event.
            if (source != NULL) {
                source->process(state, source);
            }

            // If a sensor has data, process it now.
            if (ident == LOOPER_ID_USER) {
                if (engine.accelerometerSensor != NULL) {
                    ASensorEvent event;
                    while (ASensorEventQueue_getEvents(engine.sensorEventQueue,
                            &event, 1) > 0) {
                        LOGI("accelerometer: x=%f y=%f z=%f",
                                event.acceleration.x, event.acceleration.y,
                                event.acceleration.z);
                    }
                }
            }

            // Check if we are exiting.
            if (state->destroyRequested != 0) {
                engine_term_display(&engine);
                return;
            }
        }

        if (engine.animating) {
            // Done with events; draw next animation frame.
            engine.state.angle += .01f;
            if (engine.state.angle > 1) {
                engine.state.angle = 0;
            }

            // Drawing is throttled to the screen update rate, so there
            // is no need to do timing here.
            engine_draw_frame(&engine);
        }
    }
}
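If the full sample feels dense, the sketch below strips it down to the bare glue skeleton: no EGL, no sensors, just a command callback and the looper loop that every NativeActivity-based app runs. It assumes the same android_native_app_glue setup as above; skeleton_handle_cmd is an illustrative name of my own, not part of the NDK. For the real sample, also note that the shared library must end up named libnative-activity.so to match the android.app.lib_name meta-data, and it is linked against the android, log, EGL, and GLESv1_CM system libraries.

#include <android/log.h>
#include <android_native_app_glue.h>

#define LOGI(...) ((void)__android_log_print(ANDROID_LOG_INFO, "native-skeleton", __VA_ARGS__))

/* Illustrative command handler: just log each lifecycle command it receives. */
static void skeleton_handle_cmd(struct android_app* app, int32_t cmd) {
    LOGI("got app command %d", (int)cmd);
}

/* Bare-bones entry point: register the command callback, then block on the
 * looper and dispatch events until the activity is destroyed. */
void android_main(struct android_app* state) {
    // Make sure glue isn't stripped.
    app_dummy();

    state->onAppCmd = skeleton_handle_cmd;

    while (1) {
        int events;
        struct android_poll_source* source;

        // Block (-1 timeout) until an event arrives, then let the glue dispatch it.
        while (ALooper_pollAll(-1, NULL, &events, (void**)&source) >= 0) {
            if (source != NULL) {
                source->process(state, source);
            }
            if (state->destroyRequested != 0) {
                return;   // leaving android_main ends the native activity
            }
        }
    }
}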
