Starting now, I've decided to record my experience of learning and using OpenGL ES as a series of notes, partly so I can look things up later if I forget, and partly in the hope that it helps others who hit the same problems. Without further ado, let's get started!
The business requirement is this: draw a basic scene plus a 3D model, and keep the 3D model's pose (x/y/z offsets and rotation angles) in sync with an external device. In short, however the real device is positioned, the 3D model must be positioned the same way, and any rotation, translation, or swing of the device has to show up on the 3D model as well. Breaking the requirement down: first, we need a static scene drawn in the world coordinate system that never rotates or moves with the device; second, we need a 3D device model, also in the world coordinate system, that can be rotated, scaled, and translated independently.
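In matrix terms, both requirements come down to one pair of projection and view matrices shared by everything, while only the device model gets its own model matrix rebuilt from the externally reported pose. Below is a minimal sketch of that idea using android.opengl.Matrix; the pose values (x/y/z offsets plus yaw/pitch/roll in degrees) and the rotation order are assumptions for illustration, not part of any real device API.
float[] viewProj = new float[16]; //projection * view, shared by the static scene and the model
float[] model = new float[16]; //model matrix, used only by the device model
float[] mvp = new float[16];
Matrix.multiplyMM(viewProj, 0, projectionMatrix, 0, viewMatrix, 0);
//the static scene is drawn with viewProj directly, so it never follows the device
Matrix.setIdentityM(model, 0);
Matrix.translateM(model, 0, poseX, poseY, poseZ); //offsets reported by the external device (assumed fields)
Matrix.rotateM(model, 0, yawDeg, 0f, 0f, 1f); //rotation about Z
Matrix.rotateM(model, 0, pitchDeg, 1f, 0f, 0f); //rotation about X
Matrix.rotateM(model, 0, rollDeg, 0f, 1f, 0f); //rotation about Y
Matrix.multiplyMM(mvp, 0, viewProj, 0, model, 0); //MVP applied only when drawing the device model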
First, create a base scene class that holds the properties of the static scene. Here I draw the x, y, and z axes in the three primary colors red, green, and blue:
public class CoordinationView {
private int programId;
private float[] matrix = new float[16];
private int positionHandle;
private int matrixHandle;
private int colorHandle;
private ArrayList<Float> vertexList = new ArrayList<>();
private FloatBuffer vertexBuf;
private FloatBuffer colorBuf;
private float[] color = {
1f, 0f, 0f, 1f,
1f, 0f, 0f, 1f,
0f, 1f, 0f, 1f,
0f, 1f, 0f, 1f,
0f, 0f, 1f, 1f,
0f, 0f, 1f, 1f
};
public CoordinationView(){
Matrix.setIdentityM(matrix, 0);
initBaseLineData();
}
private void initBaseLineData(){
//X axis
vertexList.add(0f);
vertexList.add(0f);
vertexList.add(0f);
vertexList.add(200f);
vertexList.add(0f);
vertexList.add(0f);
//Y axis
vertexList.add(0f);
vertexList.add(0f);
vertexList.add(0f);
vertexList.add(0f);
vertexList.add(200f);
vertexList.add(0f);
//Z axis
vertexList.add(0f);
vertexList.add(0f);
vertexList.add(0f);
vertexList.add(0f);
vertexList.add(0f);
vertexList.add(200f);
vertexBuf = GlUtil.createFloatBuffer(vertexList);
colorBuf = GlUtil.createFloatBuffer(color);
}
public float[] getMatrix(){
return matrix;
}
public void createProgram(){
programId = GlUtil.createProgram(ResReadUtils.readResource(R.raw.vertex_base),
ResReadUtils.readResource(R.raw.fragment_base));
positionHandle = GLES20.glGetAttribLocation(programId, "vPosition");
matrixHandle = GLES20.glGetUniformLocation(programId,"vMatrix");
colorHandle = GLES20.glGetAttribLocation(programId, "aColor");
}
public void drawSelf(){
GLES20.glUseProgram(programId);
GLES20.glUniformMatrix4fv(matrixHandle, 1, false, matrix, 0);
GLES20.glEnableVertexAttribArray(positionHandle);
GLES20.glEnableVertexAttribArray(colorHandle);
GLES20.glVertexAttribPointer(colorHandle, 4, GLES20.GL_FLOAT, false, 4*4, colorBuf);
GLES20.glVertexAttribPointer(positionHandle,3, GLES20.GL_FLOAT, false, 3*4,vertexBuf);
GLES20.glDrawArrays(GLES20.GL_LINES,0,vertexList.size()/3);
GLES20.glDisableVertexAttribArray(positionHandle);
GLES20.glDisableVertexAttribArray(colorHandle);
}
}
Vertex shader vertex_base.glsl:
attribute vec4 vPosition;
uniform mat4 vMatrix;
attribute vec4 aColor;
varying vec4 vColor;
void main() {
vColor = aColor;
gl_Position = vMatrix*vPosition;
}
Fragment shader fragment_base.glsl:
precision mediump float;
varying vec4 vColor;
void main() {
gl_FragColor = vColor;
}
Next, create a 3D model class that draws the 3D model and lets us rotate, scale, and translate it:
public class SensorDevice {
private int programId;
private float[] matrix = new float[16];
private Obj3D obj3D;
private int positionHandle;
private int textureHandle;
private int matrixHandle;
private int coordinateHandle;
private int normalHandle;
public SensorDevice(){
Matrix.setIdentityM(matrix, 0);
obj3D =new Obj3D();
ObjReader.read(App.getInstance().getContext().getResources().openRawResource(R.raw.device), obj3D);
}
public float[]getMatrix() {
return matrix;
}
public void createProgram(){
programId = GlUtil.createProgram(ResReadUtils.readResource(R.raw.vertex),
ResReadUtils.readResource(R.raw.fragment));
normalHandle= GLES20.glGetAttribLocation(programId,"vNormal");
positionHandle= GLES20.glGetAttribLocation(programId, "vPosition");
coordinateHandle=GLES20.glGetAttribLocation(programId,"vCoord");
matrixHandle=GLES20.glGetUniformLocation(programId,"vMatrix");
textureHandle=GLES20.glGetUniformLocation(programId,"vTexture");
}
public void drawSelf(){
GLES20.glUseProgram(programId);
GLES20.glUniformMatrix4fv(matrixHandle, 1, false, matrix, 0);
GLES20.glEnableVertexAttribArray(positionHandle);
GLES20.glVertexAttribPointer(positionHandle,3, GLES20.GL_FLOAT, false, 3*4,obj3D.vert);
GLES20.glEnableVertexAttribArray(normalHandle);
GLES20.glVertexAttribPointer(normalHandle,3, GLES20.GL_FLOAT, false, 3*4,obj3D.vertNorl);
GLES20.glDrawArrays(GLES20.GL_TRIANGLES,0,obj3D.vertCount);
GLES20.glDisableVertexAttribArray(positionHandle);
GLES20.glDisableVertexAttribArray(normalHandle);
}
}
Vertex shader vertex.glsl:
attribute vec3 vPosition;
attribute vec2 vCoord;
uniform mat4 vMatrix;
varying vec2 textureCoordinate;
attribute vec3 vNormal; //vertex normal
varying vec4 vDiffuse; //final diffuse intensity passed to the fragment shader
//returns the diffuse light intensity
vec4 pointLight(vec3 normal,vec3 lightLocation,vec4 lightDiffuse){
//transformed normal
vec3 newTarget=normalize((vMatrix*vec4(normal+vPosition,1)).xyz-(vMatrix*vec4(vPosition,1)).xyz);
//direction vector from the surface point to the light
vec3 vp=normalize(lightLocation-(vMatrix*vec4(vPosition,1)).xyz);
return lightDiffuse*max(0.0,dot(newTarget,vp));
}
void main(){
gl_Position = vMatrix*vec4(vPosition,1);
textureCoordinate = vCoord;
vec4 at=vec4(0.5,0.5,0.5,0.5); //light intensity
vec3 pos=vec3(0.0,0.0,250.0); //light position
vDiffuse=pointLight(vNormal,pos,at);
}
Fragment shader fragment.glsl:
precision mediump float;
varying vec2 textureCoordinate;
uniform sampler2D vTexture;
varying vec4 vDiffuse;//diffuse component received from the vertex shader
void main() {
vec4 finalColor=vec4(1.0);
//assign this fragment's color: diffuse plus a small constant ambient term
gl_FragColor=finalColor*vDiffuse+finalColor*vec4(0.15,0.15,0.15,1.0);
}
Finally, here is how it is all wired up:
public class DemoGLSurfaceView extends GLSurfaceView {
public DemoGLSurfaceView(Context context) {
this(context, null);
}
public DemoGLSurfaceView(Context context, AttributeSet attrs) {
super(context, attrs);
setZOrderOnTop(true);
setEGLConfigChooser(8, 8, 8, 8, 16, 0);
getHolder().setFormat(PixelFormat.TRANSLUCENT);
setEGLContextClientVersion(2);
DemoRender demoRender =new DemoRender();
setRenderer(demoRender);
setRenderMode(RENDERMODE_CONTINUOUSLY);
}
}
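For completeness, the view is attached like any other Android view. A minimal sketch, assuming a hypothetical MainActivity:
public class MainActivity extends Activity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
//the renderer set up in DemoGLSurfaceView starts drawing continuously
setContentView(new DemoGLSurfaceView(this));
}
}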
public class DemoRender implements GLSurfaceView.Renderer {
private RenderFilter renderFilter;
@Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
GLES20.glEnable(GLES20.GL_DEPTH_TEST);
renderFilter =new RenderFilter();
renderFilter.createProgram();
}
@Override
public void onSurfaceChanged(GL10 gl, int width, int height) {
GLES20.glViewport(0, 0, width, height);
if (renderFilter !=null) {
renderFilter.setViewSize(width, height);
}
}
@Override
public void onDrawFrame(GL10 gl) {
//set the clear color (RGBA)
GLES20.glClearColor(0f,0f,0f,1);
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT|GLES20.GL_DEPTH_BUFFER_BIT);
renderFilter.drawBaseGrid();
renderFilter.draw3dObj();
}
}
public class RenderFilter {
private float[] mViewMatrix = new float[16];
private float[] mProjectionMatrix = new float[16];
private float[] mvpMatrix = new float[16];
private float[] cameraPositions = {
150, 150, 150, //eye position
0, 0, 0, //look-at center
0, 0, 1 //up vector
};
private CoordinationView coordinationView;
private SensorDevice sensorDevice;
public RenderFilter() {
initMatrix();
coordinationView =new CoordinationView();
sensorDevice =new SensorDevice();
}
private void initMatrix() {
Matrix.setIdentityM(mViewMatrix, 0);
Matrix.setIdentityM(mProjectionMatrix, 0);
Matrix.setIdentityM(mvpMatrix, 0);
}
public void createProgram(){
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
coordinationView.createProgram();
sensorDevice.createProgram();
}
public void setViewSize(int width, int height){
float ratio = (float)width/height;
Matrix.frustumM(mProjectionMatrix, 0, -ratio, ratio, -1, 1,
1f, 10000);
setLook();
}
public void setLook(){
Matrix.setLookAtM(mViewMatrix, 0,
cameraPositions[0],cameraPositions[1],cameraPositions[2],
cameraPositions[3],cameraPositions[4],cameraPositions[5],
cameraPositions[6],cameraPositions[7],cameraPositions[8]);
Matrix.multiplyMM(coordinationView.getMatrix(), 0, mProjectionMatrix, 0,
mViewMatrix, 0);
Matrix.multiplyMM(sensorDevice.getMatrix(), 0, mProjectionMatrix, 0,
mViewMatrix, 0);
}
public void setMatrix(float[] matrix){
Matrix.setIdentityM(mvpMatrix, 0);
Matrix.multiplyMM(mvpMatrix, 0, mProjectionMatrix, 0,
mViewMatrix, 0);
Matrix.multiplyMM(mvpMatrix, 0, mvpMatrix, 0, matrix, 0);
System.arraycopy(mvpMatrix, 0, sensorDevice.getMatrix(), 0, mvpMatrix.length);
}
public void drawBaseGrid(){
coordinationView.drawSelf();
}
public void draw3dObj() {
//rotate 1 degree per frame around the Z axis
Matrix.rotateM(sensorDevice.getMatrix(), 0, 1, 0, 0, 1);
sensorDevice.drawSelf();
}
}
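setMatrix() is the hook for the external device: whenever a new pose arrives, build a model matrix from it and hand it over, and RenderFilter recombines projection * view * model for the device. Below is a minimal sketch, assuming a hypothetical pose callback (fields in world units and degrees) and that glSurfaceView and renderFilter are reachable from it; queueEvent keeps the update on the GL thread. If the pose is driven this way, the demo rotation in draw3dObj() would of course be removed.
public void onDevicePose(float x, float y, float z, float pitch, float yaw, float roll) {
glSurfaceView.queueEvent(() -> {
float[] model = new float[16];
Matrix.setIdentityM(model, 0);
Matrix.translateM(model, 0, x, y, z); //position offsets from the device
Matrix.rotateM(model, 0, yaw, 0f, 0f, 1f); //assumed rotation order: Z, then X, then Y
Matrix.rotateM(model, 0, pitch, 1f, 0f, 0f);
Matrix.rotateM(model, 0, roll, 0f, 1f, 0f);
renderFilter.setMatrix(model);
});
}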
The 3D model loader and the GLSL script loader:
public class GlUtil {
private static final String TAG = "GlUtil";
public static int createProgram(String vertexSource, String fragmentSource){
int vertexShader =loadShader(GLES20.GL_VERTEX_SHADER,vertexSource);
if(vertexShader ==0){
return 0;
}
int pixelShader =loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
if(pixelShader ==0){
return 0;
}
int program = GLES20.glCreateProgram();
checkGlError("glCreateProgram");
if(program ==0){
Log.e(TAG, "Could not create program");
}
GLES20.glAttachShader(program, vertexShader);
checkGlError("glAttachShader");
GLES20.glAttachShader(program, pixelShader);
checkGlError("glAttachShader");
GLES20.glLinkProgram(program);
int[] linkStatus =new int[1];
GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
if(linkStatus[0] != GLES20.GL_TRUE){
Log.e(TAG, "Could not link program:");
Log.e(TAG, GLES20.glGetProgramInfoLog(program));
GLES20.glDeleteProgram(program);
program =0;
}
return program;
}
public static int loadShader(int shaderType, String source){
int shader = GLES20.glCreateShader(shaderType);
checkGlError("glCreateShader type="+shaderType);
GLES20.glShaderSource(shader, source);
GLES20.glCompileShader(shader);
int[] compiled =new int[1];
GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
if(compiled[0] ==0){
Log.e(TAG, "Could not compile shader " + shaderType +":");
Log.e(TAG, " " + GLES20.glGetShaderInfoLog(shader));
GLES20.glDeleteShader(shader);
shader =0;
}
return shader;
}
public static void checkGlError(String op){
int error = GLES20.glGetError();
if(error != GLES20.GL_NO_ERROR){
String msg = op +":glError 0x"+Integer.toHexString(error);
Log.e(TAG, msg);
throw new RuntimeException(msg);
}
}
public static FloatBuffer createFloatBuffer(float[] coords){
ByteBuffer bb = ByteBuffer.allocateDirect(coords.length*4);
bb.order(ByteOrder.nativeOrder());
FloatBuffer fb = bb.asFloatBuffer();
fb.put(coords);
fb.position(0);
return fb;
}
public static FloatBuffer createFloatBuffer(List<Float> data){
float[] coords =new float[data.size()];
for(int i=0;i<data.size();i++){
coords[i] = data.get(i);
}
return createFloatBuffer(coords);
}
}
public class ResReadUtils {
/**
* Reads a raw text resource
* @return the resource content as a String
*/
public static String readResource(int resourceId){
StringBuilder builder =new StringBuilder();
try {
InputStream inputStream = App.getInstance().getResources().openRawResource(resourceId);
InputStreamReader streamReader =new InputStreamReader(inputStream);
BufferedReader bufferedReader =new BufferedReader(streamReader);
String textLine;
while ((textLine = bufferedReader.readLine()) !=null) {
builder.append(textLine);
builder.append("\n");
}
}catch (Exception e) {
e.printStackTrace();
}
return builder.toString();
}
}
public class ObjReader {
public static void read(InputStream stream, Obj3D obj3D){
ArrayList<Float> alv=new ArrayList<Float>();//raw vertex coordinate list
ArrayList<Float> alvResult=new ArrayList<Float>();//expanded per-face vertex coordinate list
ArrayList<Float> norlArr=new ArrayList<>();
float[] ab=new float[3],bc=new float[3],norl=new float[3];
try{
InputStreamReader isr=new InputStreamReader(stream);
BufferedReader br=new BufferedReader(isr);
String temps=null;
float mx=0,my=0,mz=0;
while((temps=br.readLine())!=null)
{
String[] tempsa=temps.split("[ ]+");
if(tempsa[0].trim().equals("v")) {//此行为顶点坐标
if (Float.parseFloat(tempsa[1])>mx) {
mx = Float.parseFloat(tempsa[1]);
}
if (Float.parseFloat(tempsa[2])>my) {
my = Float.parseFloat(tempsa[2]);
}
if (Float.parseFloat(tempsa[3])>mz) {
mz = Float.parseFloat(tempsa[3]);
}
alv.add(Float.parseFloat(tempsa[1]));
alv.add(Float.parseFloat(tempsa[2]));
alv.add(Float.parseFloat(tempsa[3]));
} else if(tempsa[0].trim().equals("f")) {//this line is a triangular face
int a=Integer.parseInt(tempsa[1])-1;
int b=Integer.parseInt(tempsa[2])-1;
int c=Integer.parseInt(tempsa[3])-1;
// int d=Integer.parseInt(tempsa[4])-1;
//(a quad would be the two triangles abc and acd; only triangle abc is used here)
alvResult.add(alv.get(a*3));
alvResult.add(alv.get(a*3+1));
alvResult.add(alv.get(a*3+2));
alvResult.add(alv.get(b*3));
alvResult.add(alv.get(b*3+1));
alvResult.add(alv.get(b*3+2));
alvResult.add(alv.get(c*3));
alvResult.add(alv.get(c*3+1));
alvResult.add(alv.get(c*3+2));
//Another pitfall of downloaded model files: the one I grabbed contains nothing but vertices and faces.
//To get a 3D look we add some lighting, which means computing the vertex normals ourselves.
//We use the face-normal strategy. Per-vertex normals would suit a smooth model better, but they are much more work to compute,
//and since this post is mainly about model loading, face normals will do for now.
//(A proper 3D model file usually ships with its own normal data.)
//Computing the normal: for three points A, B, C, the face normal is the cross product of vectors AB and BC, so:
for (int i=0;i<3;i++){
ab[i]=alv.get(a*3+i)-alv.get(b*3+i);
bc[i]=alv.get(b*3+i)-alv.get(c*3+i);
}
norl[0]=ab[1]*bc[2]-ab[2]*bc[1];
norl[1]=ab[2]*bc[0]-ab[0]*bc[2];
norl[2]=ab[0]*bc[1]-ab[1]*bc[0];
//add one copy of the face normal for each of the triangle's three vertices, quick and dirty
for (int i=0;i<3;i++){
norlArr.add(norl[0]);
norlArr.add(norl[1]);
norlArr.add(norl[2]);
}
}
}
//the familiar part: pack the data into direct buffers so it can be handed to the GPU
int size=alvResult.size();
float[] vXYZ=new float[size];
for(int i=0;i<size;i++){
vXYZ[i]=alvResult.get(i);
}
ByteBuffer byteBuffer=ByteBuffer.allocateDirect(4*size);
byteBuffer.order(ByteOrder.nativeOrder());
obj3D.vert=byteBuffer.asFloatBuffer();
obj3D.vert.put(vXYZ);
obj3D.vert.position(0);
obj3D.vertCount=size/3;
int vbSize=norlArr.size();
float[] vbArr=new float[vbSize];
for(int i=0;i<vbSize;i++){
vbArr[i]=norlArr.get(i);
}
ByteBuffer vb=ByteBuffer.allocateDirect(4*vbSize);
vb.order(ByteOrder.nativeOrder());
obj3D.vertNorl=vb.asFloatBuffer();
obj3D.vertNorl.put(vbArr);
obj3D.vertNorl.position(0);
// Log.e("TAG", "read: mx="+mx+",my="+my+",mz="+mz);
}catch(Exception e){
e.printStackTrace();
}
}
public static List<Obj3D> readMultiObj(Context context, String file){
boolean isAssets;
ArrayList<Obj3D> data=new ArrayList<>();
ArrayList<Float> oVs=new ArrayList<Float>();//raw vertex coordinates
ArrayList<Float> oVNs=new ArrayList<>(); //raw vertex normals
ArrayList<Float> oVTs=new ArrayList<>(); //raw texture coordinates
ArrayList<Float> oFVs=new ArrayList<>(); //face vertices
ArrayList<Float> oFVNs=new ArrayList<>();
ArrayList<Float> oFVTs=new ArrayList<>();
HashMap<String, MtlInfo> mTls=null;
HashMap<String,Obj3D> mObjs=new HashMap<>();
Obj3D nowObj=null;
MtlInfo nowMtl=null;
try{
String parent;
InputStream inputStream;
if (file.startsWith("assets/")){
isAssets=true;
String path=file.substring(7);
parent=path.substring(0,path.lastIndexOf("/")+1);
inputStream=context.getAssets().open(path);
Log.e("obj",parent);
}else{
isAssets=false;
parent=file.substring(0,file.lastIndexOf("/")+1);
inputStream=new FileInputStream(file);
}
InputStreamReader isr=new InputStreamReader(inputStream);
BufferedReader br=new BufferedReader(isr);
String temps;
while((temps=br.readLine())!=null){
if("".equals(temps)){
}else{
String[] tempsa=temps.split("[ ]+");
switch (tempsa[0].trim()){
case "mtllib": //材质
InputStream stream;
if (isAssets){
stream=context.getAssets().open(parent+tempsa[1]);
}else{
stream=new FileInputStream(parent+tempsa[1]);
}
mTls=readMtl(stream);
break;
case "usemtl": //采用纹理
if(mTls!=null){
nowMtl=mTls.get(tempsa[1]);
}
if(mObjs.containsKey(tempsa[1])){
nowObj=mObjs.get(tempsa[1]);
}else{
nowObj=new Obj3D();
nowObj.mtl=nowMtl;
mObjs.put(tempsa[1],nowObj);
}
break;
case "v": //原始顶点
read(tempsa,oVs);
break;
case "vn": //原始顶点法线
read(tempsa,oVNs);
break;
case "vt":
read(tempsa,oVTs);
break;
case "f":
for (int i=1;i<tempsa.length;i++){
String[] fs=tempsa[i].split("/");
int index;
if(fs.length>0){
//vertex index
index=Integer.parseInt(fs[0])-1;
nowObj.addVert(oVs.get(index*3));
nowObj.addVert(oVs.get(index*3+1));
nowObj.addVert(oVs.get(index*3+2));
}
if(fs.length>1){
//texture coordinate index
index=Integer.parseInt(fs[1])-1;
nowObj.addVertTexture(oVTs.get(index*2));
nowObj.addVertTexture(oVTs.get(index*2+1));
}
if(fs.length>2){
//normal index
index=Integer.parseInt(fs[2])-1;
nowObj.addVertNorl(oVNs.get(index*3));
nowObj.addVertNorl(oVNs.get(index*3+1));
nowObj.addVertNorl(oVNs.get(index*3+2));
}
}
break;
}
}
}
}catch (Exception e){
e.printStackTrace();
}
for (Map.Entry<String, Obj3D> stringObj3DEntry : mObjs.entrySet()) {
Obj3D obj = stringObj3DEntry.getValue();
data.add(obj);
obj.dataLock();
}
return data;
}
public static HashMap<String, MtlInfo> readMtl(InputStream stream){
HashMap<String,MtlInfo> map=new HashMap<>();
try{
InputStreamReader isr=new InputStreamReader(stream);
BufferedReader br=new BufferedReader(isr);
String temps;
MtlInfo mtlInfo=new MtlInfo();
while((temps=br.readLine())!=null)
{
String[] tempsa=temps.split("[ ]+");
switch (tempsa[0].trim()){
case "newmtl": //材质
mtlInfo=new MtlInfo();
mtlInfo.newmtl=tempsa[1];
map.put(tempsa[1],mtlInfo);
break;
case "illum": //光照模型
mtlInfo.illum=Integer.parseInt(tempsa[1]);
break;
case "Kd":
read(tempsa,mtlInfo.Kd);
break;
case "Ka":
read(tempsa,mtlInfo.Ka);
break;
case "Ke":
read(tempsa,mtlInfo.Ke);
break;
case "Ks":
read(tempsa,mtlInfo.Ks);
break;
case "Ns":
mtlInfo.Ns=Float.parseFloat(tempsa[1]);
case "map_Kd":
mtlInfo.map_Kd=tempsa[1];
break;
}
}
}catch (Exception e){
e.printStackTrace();
}
return map;
}
private static void read(String[] value,ArrayList<Float> list){
for (int i=1;i<value.length;i++){
list.add(Float.parseFloat(value[i]));
}
}
private static void read(String[] value,float[] fv){
for (int i=1;i<value.length&&i<fv.length+1;i++){
fv[i-1]=Float.parseFloat(value[i]);
}
}
}
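read() above is the bare-bones path used by SensorDevice (vertices and faces only, normals computed on the fly); readMultiObj() is the fuller parser for OBJ files that reference an .mtl material library. A minimal usage sketch follows; the asset path is an assumption:
//parse an OBJ stored under assets/, with its .mtl file in the same folder
List<Obj3D> parts = ObjReader.readMultiObj(context, "assets/models/device.obj");
for (Obj3D part : parts) {
//each part carries vert / vertNorl / vertTexture buffers plus its material in part.mtl
}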
public class MtlInfo {
public String newmtl;
public float[] Ka=new float[3]; //ambient color
public float[] Kd=new float[3]; //diffuse color
public float[] Ks=new float[3]; //specular color
public float[] Ke=new float[3]; //emissive color
public float Ns; //shininess
public String map_Kd; //diffuse texture map
public String map_Ks; //specular texture map
public String map_Ka; //ambient texture map
//denotes the illumination model used by the material.
// illum = 1 indicates a flat material with no specular highlights,
// so the value of Ks is not used.
// illum = 2 denotes the presence of specular highlights,
// and so a specification for Ks is required.
public int illum;
}
public class Obj3D {
public FloatBuffer vert;
public int vertCount;
public FloatBuffer vertNorl;
public FloatBuffer vertTexture;
public MtlInfo mtl;
private ArrayList<Float> tempVert;
private ArrayList<Float> tempVertNorl;
public ArrayList<Float> tempVertTexture;
public void addVert(float d){
if(tempVert==null){
tempVert=new ArrayList<>();
}
tempVert.add(d);
}
public void addVertTexture(float d){
if(tempVertTexture==null){
tempVertTexture=new ArrayList<>();
}
tempVertTexture.add(d);
}
public void addVertNorl(float d){
if(tempVertNorl==null){
tempVertNorl=new ArrayList<>();
}
tempVertNorl.add(d);
}
public void dataLock(){
if(tempVert!=null){
setVert(tempVert);
tempVert.clear();
tempVert=null;
}
if(tempVertTexture!=null){
setVertTexture(tempVertTexture);
tempVertTexture.clear();
tempVertTexture=null;
}
if(tempVertNorl!=null){
setVertNorl(tempVertNorl);
tempVertNorl.clear();
tempVertNorl=null;
}
}
public void setVert(ArrayList<Float> data){
int size=data.size();
ByteBuffer buffer=ByteBuffer.allocateDirect(size*4);
buffer.order(ByteOrder.nativeOrder());
vert=buffer.asFloatBuffer();
for (int i=0;i<size;i++){
vert.put(data.get(i));
}
vert.position(0);
vertCount=size/3;
}
public void setVertNorl(ArrayList<Float> data){
int size=data.size();
ByteBuffer buffer=ByteBuffer.allocateDirect(size*4);
buffer.order(ByteOrder.nativeOrder());
vertNorl=buffer.asFloatBuffer();
for (int i=0;i<size;i++){
vertNorl.put(data.get(i));
}
vertNorl.position(0);
}
public void setVertTexture(ArrayList<Float> data){
int size=data.size();
ByteBuffer buffer=ByteBuffer.allocateDirect(size*4);
buffer.order(ByteOrder.nativeOrder());
vertTexture=buffer.asFloatBuffer();
for (int i=0;i<size;){
vertTexture.put(data.get(i));
i++;
vertTexture.put(data.get(i));
i++;
}
vertTexture.position(0);
}
}
For the sake of length I won't attach the model file itself; a quick search online turns up plenty. With the code above, the functionality described at the start is basically in place. If you have any questions, leave a comment and I'll do my best to answer. Thanks for reading!