How to set onTouch Listener for drawn texture in Android OpenGL-ES

江枫思渺然 提交于 2019-12-22 01:06:22

问题


I have a texture in my application and I can drag it with my finger, but currently the texture jumps to wherever I touch on the screen. How can I set up touch handling so that the texture only moves when my finger actually lands on the texture itself?

Any guidance will be appreciated~

this is my main class:

import android.app.Activity;
import android.os.Bundle;
import android.view.WindowManager;

public class MainActivity extends Activity {

    /** The GL surface that draws the texture and handles touch input. */
    private Stage stage;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        // Go full-screen and keep the display on while this activity is visible.
        getWindow().addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN
                | WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
        getWindow().clearFlags(WindowManager.LayoutParams.FLAG_FORCE_NOT_FULLSCREEN);

        setContentView(R.layout.main_layout);
        stage = (Stage) findViewById(R.id.my_stage);
    }

    @Override
    protected void onPause() {
        super.onPause();
        // Forward the lifecycle event so GLSurfaceView pauses its render thread.
        stage.onPause();
    }

    @Override
    protected void onResume() {
        super.onResume();
        // Forward the lifecycle event so GLSurfaceView resumes its render thread.
        stage.onResume();
    }
}

this is stage sub class:

import android.content.Context;
import android.opengl.GLES10;
import android.opengl.GLSurfaceView;
import android.util.AttributeSet;
import android.util.Log;
import android.view.MotionEvent;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;

public class Stage extends GLSurfaceView{

//Stage width and height
private float w, h;
//Texture position
private float xPos, yPos;
//Scale ratio
private float r, ratio, dist1, dist2;
//Screen width and height
private int screenWidth, screenHeight;
//Our native vertex buffer
private FloatBuffer vertexBuffer;
private Texture tex;
MyRenderer mRenderer;

@Override
public boolean onTouchEvent(MotionEvent event) {
    final int action = event.getAction() & MotionEvent.ACTION_MASK;
    float x, y, x1, x2, y1, y2;
    int pointerIndex;

    if(event.getPointerCount()==2){
        if (action == MotionEvent.ACTION_POINTER_UP) {
            x1 = event.getX(0);
            y1 = event.getY(0);
        } else {
            x1 = event.getX(0);
            y1 = event.getY(0);
        }
        if (action == MotionEvent.ACTION_POINTER_DOWN) {
            x2 = event.getX(1);
            y2 = event.getY(1);
            dist1 = (float)Math.sqrt((x1 - x2) * (x1 - x2) + (y1 - y2) * (y1 - y2));
        } else {
            x2 = event.getX(1);
            y2 = event.getY(1);
            dist2 = (float)Math.sqrt((x1 - x2) * (x1 - x2) + (y1 - y2) * (y1 - y2));
        }
        ratio = dist2/dist1;
        mRenderer.setRatio(ratio);
        requestRender();
    }
    if(event.getPointerCount()==1){
        if (action == MotionEvent.ACTION_POINTER_DOWN) {
                x = event.getX();
                y = event.getY();
        } else {
            pointerIndex = event.getActionIndex();
            x = event.getX(pointerIndex);
            y = event.getY(pointerIndex);
        }
        mRenderer.setXY(x, y);
        requestRender();
    }
    return true;
}

public Stage(Context context, AttributeSet attrs) {
    super(context, attrs);
    setEGLConfigChooser(8, 8, 8, 8, 0, 0);
    mRenderer = new MyRenderer();
    setRenderer(mRenderer);
    setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
    float vertices[] = {
            -0.5f, -0.5f,  0.0f,  // 0. left-bottom
            0.5f, -0.5f,  0.0f,  // 1. right-bottom
            -0.5f,  0.5f,  0.0f,  // 2. left-top
            0.5f,  0.5f,  0.0f   // 3. right-top
    };

    ByteBuffer vbb = ByteBuffer.allocateDirect(vertices.length * 4);
    vbb.order(ByteOrder.nativeOrder());
    vertexBuffer = vbb.asFloatBuffer();
    vertexBuffer.put(vertices);
    vertexBuffer.position(0);

    tex = new Texture(R.drawable.kdk);
}

private class MyRenderer implements GLSurfaceView.Renderer {

    private Object lock = new Object();
    public void setXY(float x, float y) {
        synchronized (lock) {
            xPos = x * w / screenWidth;
            yPos = y * h / screenHeight;
        }
    }

    public void setRatio(float scale){
        r = scale;
    }

    public final void onDrawFrame(GL10 gl) {
        gl.glClear(GLES10.GL_COLOR_BUFFER_BIT);
        tex.prepare(gl, GL10.GL_CLAMP_TO_EDGE);
        gl.glVertexPointer(3, GL10.GL_FLOAT, 0, vertexBuffer);
        synchronized (lock) {
            tex.draw(gl, xPos, yPos, tex.getWidth()*r, tex.getHeight()*r, 0);
        }
    }

    public final void onSurfaceChanged(GL10 gl, int width, int height) {
        gl.glClearColor(0, 0, 0, 0);

        if(width > height) {
            h = 600;
            w = width * h / height;
        } else {
            w = 600;
            h = height * w / width;
        }
        screenWidth = width;
        screenHeight = height;

        xPos = w/2;
        yPos = h/2;
        r=1;

        gl.glViewport(0, 0, screenWidth, screenHeight);
        gl.glMatrixMode(GL10.GL_PROJECTION);
        gl.glLoadIdentity();
        gl.glOrthof(0, w, h, 0, -1, 1);
        gl.glMatrixMode(GL10.GL_MODELVIEW);
        gl.glLoadIdentity();
    }

    public final void onSurfaceCreated(GL10 gl, EGLConfig config) {
        // Set up alpha blending
        gl.glEnable(GL10.GL_ALPHA_TEST);
        gl.glEnable(GL10.GL_BLEND);
        gl.glBlendFunc(GL10.GL_ONE, GL10.GL_ONE_MINUS_SRC_ALPHA);

        // We are in 2D. Why needs depth?
        gl.glDisable(GL10.GL_DEPTH_TEST);

        // Enable vertex arrays (we'll use them to draw primitives).
        gl.glEnableClientState(GL10.GL_VERTEX_ARRAY);

        // Enable texture coordination arrays.
        gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);

        tex.load(getContext());
    }

}

}

this is texture sub class:

import android.content.Context;
import android.content.res.Resources;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.opengl.GLES10;
import android.opengl.GLES20;
import android.opengl.GLUtils;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import javax.microedition.khronos.opengles.GL10;

public class Texture {

/**
 * The OpenGL ES texture name associated with this texture,
 * or -1 while unloaded.
 */
protected int textureId;

/**
 * The horizontal and vertical dimensions of the image, in pixels.
 */
protected int width, height;

/**
 * The resource identifier for the image we want to load.
 */
int resourceId;

/**
 * Whether or not we should generate mip maps.
 */
boolean mipmaps;

/**
 * The buffer containing texture mappings (one (s,t) pair per quad vertex).
 */
private FloatBuffer tempTextureBuffer = null;

Texture(int resourceId, boolean mipmaps) {
    this.resourceId = resourceId;
    this.textureId = -1;
    this.mipmaps = mipmaps;
}

Texture(int resourceId) {
    this(resourceId, false);
}

/**
 * Generates a new OpenGL ES texture name (identifier).
 * @return The newly generated texture name.
 */
private static final int newTextureID() {
    int[] temp = new int[1];
    GLES10.glGenTextures(1, temp, 0);
    return temp[0];
}

public final int getWidth() {
    return width;
}

public final int getHeight() {
    return height;
}

/**
 * Decodes the drawable resource and uploads it as a GL texture.
 * Must be called on the GL thread with a current context
 * (e.g. from Renderer.onSurfaceCreated).
 *
 * @throws IllegalArgumentException if the resource cannot be decoded.
 */
public final void load(Context context) {
    // Load the bitmap from resources, at its native pixel size.
    BitmapFactory.Options opts = new BitmapFactory.Options();
    opts.inScaled = false;
    Bitmap bmp = BitmapFactory.decodeResource(context.getResources(), resourceId, opts);

    // BUGFIX: decodeResource returns null for a missing/undecodable
    // resource; the original then crashed with an opaque NPE below.
    if (bmp == null) {
        throw new IllegalArgumentException(
                "Could not decode drawable resource 0x" + Integer.toHexString(resourceId));
    }

    // Update this texture instance's width and height.
    width = bmp.getWidth();
    height = bmp.getHeight();

    // Create and bind a new texture name.
    textureId = newTextureID();
    GLES10.glBindTexture(GL10.GL_TEXTURE_2D, textureId);

    // Load the texture into our texture name.
    GLUtils.texImage2D(GL10.GL_TEXTURE_2D, 0, bmp, 0);

    // Set magnification filter to bilinear interpolation.
    GLES10.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);

    if(mipmaps) {
        // If mipmaps are requested, generate mipmaps and set minification filter
        // to trilinear filtering.
        // NOTE(review): glGenerateMipmap is an ES 2.0 entry point; calling it
        // from an ES 1.x context (as Stage creates) may be a no-op or error —
        // verify on target devices, or use GL_GENERATE_MIPMAP via glTexParameter.
        GLES20.glGenerateMipmap(GLES20.GL_TEXTURE_2D);
        GLES10.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_LINEAR_MIPMAP_LINEAR);
    }
    else GLES10.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_LINEAR);

    // Recycle the bitmap: the pixels now live in GL memory.
    bmp.recycle();

    // If texture mapping buffer has not been initialized yet, do it now.
    if(tempTextureBuffer == null)
        buildTextureMapping();
}

/**
 * Builds the texture mapping buffer (full-image mapping for a
 * triangle-strip quad, matching Stage's vertex order).
 */
private void buildTextureMapping() {
    // The array of texture mapping coordinates.
    final float texture[] = {
            0, 0, // The first vertex
            1, 0, // The second vertex
            0, 1, // The third vertex
            1, 1, // The fourth vertex
    };

    // Create a native buffer out of the above array.
    final ByteBuffer ibb = ByteBuffer.allocateDirect(texture.length * 4);
    ibb.order(ByteOrder.nativeOrder());
    tempTextureBuffer = ibb.asFloatBuffer();
    tempTextureBuffer.put(texture);
    tempTextureBuffer.position(0);
}

/**
 * Deletes the texture name and marks this instance as unloaded.
 * Safe to call more than once.
 */
public final void destroy() {
    // BUGFIX: the original deleted unconditionally, passing the sentinel
    // -1 to glDeleteTextures when the texture was never (or no longer) loaded.
    if (!isLoaded()) return;

    GLES10.glDeleteTextures(1, new int[] {textureId}, 0);

    // Setting this value to -1 indicates that it is unloaded.
    textureId = -1;
}

public final boolean isLoaded() {
    return textureId >= 0;
}

/**
 * Binds this texture and activates its coordinate array for drawing.
 * load() must have completed first (tempTextureBuffer must exist).
 */
public final void prepare(GL10 gl, int wrap) {
    // Enable 2D texture
    gl.glEnable(GL10.GL_TEXTURE_2D);

    // Bind our texture name
    gl.glBindTexture(GL10.GL_TEXTURE_2D, textureId);

    // Set texture wrap methods
    gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_S, wrap);
    gl.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_WRAP_T, wrap);

    // Enable texture coordinate arrays and load (activate) ours
    gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY);
    gl.glTexCoordPointer(2, GL10.GL_FLOAT, 0, tempTextureBuffer);
}

/**
 * Draws the bound texture as a quad of size (w, h) centred at (x, y),
 * rotated by rot degrees about the z axis.
 */
public final void draw(GL10 gl, float x, float y, float w, float h, float rot) {
    gl.glPushMatrix();
    gl.glTranslatef(x, y, 0);
    gl.glRotatef(rot, 0, 0, 1);
    gl.glScalef(w, h, 0); // Scaling will be performed first.
    gl.glDrawArrays(GL10.GL_TRIANGLE_STRIP, 0, 4);
    gl.glPopMatrix();
}

}

回答1:


You have all the data you need to compute that. It seems you use the coordinate system with top left (0,0) and bottom right at (w,h). The touch coordinates must be transformed into the same system such as touchX*(w/screenWidth), similar for vertical coordinate.

The position of your texture is also defined with center, static coordinates and scale which should be enough to find the actual positions of the texture vertices.

Now consider you have point touch and your texture border values as left, right, bottom, top.

bool didHit = touch.x >= left && touch.x <= right && touch.y >= top && touch.y <= bottom;
(Note: with a top-left origin, y grows downward, so numerically top <= bottom — the y comparison must be against top first, not bottom.)


来源:https://stackoverflow.com/questions/32834363/how-to-set-ontouch-listener-for-drawn-texture-in-android-opengl-es

易学教程内所有资源均来自网络或用户发布的内容,如有违反法律规定的内容欢迎反馈
该文章没有解决你所遇到的问题?点击提问,说说你的问题,让更多的人一起探讨吧!