
    OpenGL Learning: Lesson 08

    Introduction

    Basic lighting: diffuse reflection, specular reflection, and ambient light.
    The main lighting computation is written in the GLSL fragment shader.

    Link

    http://www.opengl-tutorial.org/uncategorized/2017/06/07/website-update/

    The Diffuse part

    Material Color

    Material color: a red material absorbs the green and blue components of the incoming light and reflects only the red.
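
    For example, a pure red material lit by white light keeps only the red channel. A minimal GLSL sketch (the variable values are just for illustration):

    vec3 MaterialDiffuseColor = vec3(1.0, 0.0, 0.0); // red material
    vec3 LightColor = vec3(1.0, 1.0, 1.0);           // white light
    // Component-wise product: green and blue are absorbed, only red is reflected
    vec3 reflected = MaterialDiffuseColor * LightColor; // == vec3(1.0, 0.0, 0.0)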

    Expressed as a simple formula:

    color = MaterialDiffuseColor * LightColor * cosTheta;
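
    Here cosTheta is the cosine of the angle between the surface normal and the direction towards the light. A minimal GLSL sketch, assuming n and l are the normalized fragment normal and fragment-to-light direction (the same names are used in the full fragment shader further below):

    // n : normalized surface normal, l : normalized direction from the fragment to the light
    float cosTheta = clamp(dot(n, l), 0.0, 1.0); // 0 when the light is behind the surface
    vec3 color = MaterialDiffuseColor * LightColor * cosTheta;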

    Modeling the light

    The light is modeled as a point light.

    The amount of light diminishes with the square of the distance:

    color = MaterialDiffuseColor * LightColor * cosTheta / (distance*distance);

    The light source is also given an initial power (e.g. 60 Watts):

    color = MaterialDiffuseColor * LightColor * LightPower * cosTheta / (distance*distance);
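
    In the fragment shader, distance is simply the world-space distance between the fragment and the light. A short sketch (the variable names match the full shader below):

    // World-space distance from the fragment to the point light
    float distance = length(LightPosition_worldspace - Position_worldspace);
    // Inverse-square attenuation, scaled by the light's power
    vec3 color = MaterialDiffuseColor * LightColor * LightPower * cosTheta / (distance * distance);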

    The Ambient component

    Ambient light really comes from light bouncing off walls and other surfaces, but simulating that is far too expensive to compute. So we cheat (it only has to fool the eye):

    the object is simply given a small baseline ambient color.

    vec3 MaterialAmbientColor = vec3(0.1,0.1,0.1) * MaterialDiffuseColor;
    color =
    // Ambient : simulates indirect lighting
    MaterialAmbientColor +
    // Diffuse : "color" of the object
    MaterialDiffuseColor * LightColor * LightPower * cosTheta / (distance*distance) ;
    

    The Specular component

    // Eye vector (towards the camera)
    vec3 E = normalize(EyeDirection_cameraspace);
    // Direction in which the triangle reflects the light
    vec3 R = reflect(-l,n);
    // Cosine of the angle between the Eye vector and the Reflect vector,
    // clamped to 0
    // - Looking into the reflection -> 1
    // - Looking elsewhere -> < 1
    float cosAlpha = clamp( dot( E,R ), 0,1 );
    color =
     // Ambient : simulates indirect lighting
     MaterialAmbientColor +
     // Diffuse : "color" of the object
     MaterialDiffuseColor * LightColor * LightPower * cosTheta / (distance*distance) +
     // Specular : reflective highlight, like a mirror
     MaterialSpecularColor * LightColor * LightPower * pow(cosAlpha,5) / (distance*distance);
    

    Code

    // Include standard headers
    #include <stdio.h>
    #include <stdlib.h>
    #include <vector>
    
    // Include GLEW
    #include <GL/glew.h>
    
    // Include GLFW
    #include <GLFW/glfw3.h>
    GLFWwindow* window;
    
    // Include GLM
    #include <glm/glm.hpp>
    #include <glm/gtc/matrix_transform.hpp>
    using namespace glm;
    
    #include <common/shader.hpp>
    #include <common/texture.hpp>
    #include <common/controls.hpp>
    #include <common/objloader.hpp>
    #include <common/vboindexer.hpp>
    
    int main( void )
    {
    	// Initialise GLFW
    	if( !glfwInit() )
    	{
    		fprintf( stderr, "Failed to initialize GLFW\n" );
    		getchar();
    		return -1;
    	}
    
    	glfwWindowHint(GLFW_SAMPLES, 4);
    	glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
    	glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
    	glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE); // To make MacOS happy; should not be needed
    	glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
    
    	// Open a window and create its OpenGL context
    	window = glfwCreateWindow( 1024, 768, "Tutorial 08 - Basic Shading", NULL, NULL);
    	if( window == NULL ){
    		fprintf( stderr, "Failed to open GLFW window. If you have an Intel GPU, they are not 3.3 compatible. Try the 2.1 version of the tutorials.\n" );
    		getchar();
    		glfwTerminate();
    		return -1;
    	}
    	glfwMakeContextCurrent(window);
    
    	// Initialize GLEW
    	glewExperimental = true; // Needed for core profile
    	if (glewInit() != GLEW_OK) {
    		fprintf(stderr, "Failed to initialize GLEW\n");
    		getchar();
    		glfwTerminate();
    		return -1;
    	}
    
    	// Ensure we can capture the escape key being pressed below
    	glfwSetInputMode(window, GLFW_STICKY_KEYS, GL_TRUE);
        // Hide the mouse and enable unlimited movement
        glfwSetInputMode(window, GLFW_CURSOR, GLFW_CURSOR_DISABLED);
        
        // Set the mouse at the center of the screen
        glfwPollEvents();
        glfwSetCursorPos(window, 1024/2, 768/2);
    
    	// Dark blue background
    	glClearColor(0.0f, 0.0f, 0.4f, 0.0f);
    
    	// Enable depth test
    	glEnable(GL_DEPTH_TEST);
    	// Accept a fragment if it is closer to the camera than the previous one
    	glDepthFunc(GL_LESS); 
    
    	// Cull triangles whose normal is not facing the camera
    	glEnable(GL_CULL_FACE);
    
    	GLuint VertexArrayID;
    	glGenVertexArrays(1, &VertexArrayID);
    	glBindVertexArray(VertexArrayID);
    
    	// Create and compile our GLSL program from the shaders
    	GLuint programID = LoadShaders( "StandardShading.vertexshader", "StandardShading.fragmentshader" );
    
    	// Get a handle for our "MVP" uniform
    	GLuint MatrixID = glGetUniformLocation(programID, "MVP");
    	GLuint ViewMatrixID = glGetUniformLocation(programID, "V");
    	GLuint ModelMatrixID = glGetUniformLocation(programID, "M");
    
    	// Load the texture
    	GLuint Texture = loadDDS("uvmap.DDS");
    	
    	// Get a handle for our "myTextureSampler" uniform
    	GLuint TextureID  = glGetUniformLocation(programID, "myTextureSampler");
    
    	// Read our .obj file
    	std::vector<glm::vec3> vertices;
    	std::vector<glm::vec2> uvs;
    	std::vector<glm::vec3> normals;
    	bool res = loadOBJ("suzanne.obj", vertices, uvs, normals);
    
    	// Load it into a VBO
    
    	GLuint vertexbuffer;
    	glGenBuffers(1, &vertexbuffer);
    	glBindBuffer(GL_ARRAY_BUFFER, vertexbuffer);
    	glBufferData(GL_ARRAY_BUFFER, vertices.size() * sizeof(glm::vec3), &vertices[0], GL_STATIC_DRAW);
    
    	GLuint uvbuffer;
    	glGenBuffers(1, &uvbuffer);
    	glBindBuffer(GL_ARRAY_BUFFER, uvbuffer);
    	glBufferData(GL_ARRAY_BUFFER, uvs.size() * sizeof(glm::vec2), &uvs[0], GL_STATIC_DRAW);
    
    	GLuint normalbuffer;
    	glGenBuffers(1, &normalbuffer);
    	glBindBuffer(GL_ARRAY_BUFFER, normalbuffer);
    	glBufferData(GL_ARRAY_BUFFER, normals.size() * sizeof(glm::vec3), &normals[0], GL_STATIC_DRAW);
    
    	// Get a handle for our "LightPosition" uniform
    	glUseProgram(programID);
    	GLuint LightID = glGetUniformLocation(programID, "LightPosition_worldspace");
    
    	do{
    
    		// Clear the screen
    		glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    
    		// Use our shader
    		glUseProgram(programID);
    
    		// Compute the MVP matrix from keyboard and mouse input
    		computeMatricesFromInputs();
    		glm::mat4 ProjectionMatrix = getProjectionMatrix();
    		glm::mat4 ViewMatrix = getViewMatrix();
    		glm::mat4 ModelMatrix = glm::mat4(1.0);
    		glm::mat4 MVP = ProjectionMatrix * ViewMatrix * ModelMatrix;
    
    		// Send our transformation to the currently bound shader, 
    		// in the "MVP" uniform
    		glUniformMatrix4fv(MatrixID, 1, GL_FALSE, &MVP[0][0]);
    		glUniformMatrix4fv(ModelMatrixID, 1, GL_FALSE, &ModelMatrix[0][0]);
    		glUniformMatrix4fv(ViewMatrixID, 1, GL_FALSE, &ViewMatrix[0][0]);
    
    		glm::vec3 lightPos = glm::vec3(4,4,4);
    		glUniform3f(LightID, lightPos.x, lightPos.y, lightPos.z);
    
    		// Bind our texture in Texture Unit 0
    		glActiveTexture(GL_TEXTURE0);
    		glBindTexture(GL_TEXTURE_2D, Texture);
    		// Set our "myTextureSampler" sampler to use Texture Unit 0
    		glUniform1i(TextureID, 0);
    
    		// 1st attribute buffer : vertices
    		glEnableVertexAttribArray(0);
    		glBindBuffer(GL_ARRAY_BUFFER, vertexbuffer);
    		glVertexAttribPointer(
    			0,                  // attribute
    			3,                  // size
    			GL_FLOAT,           // type
    			GL_FALSE,           // normalized?
    			0,                  // stride
    			(void*)0            // array buffer offset
    		);
    
    		// 2nd attribute buffer : UVs
    		glEnableVertexAttribArray(1);
    		glBindBuffer(GL_ARRAY_BUFFER, uvbuffer);
    		glVertexAttribPointer(
    			1,                                // attribute
    			2,                                // size
    			GL_FLOAT,                         // type
    			GL_FALSE,                         // normalized?
    			0,                                // stride
    			(void*)0                          // array buffer offset
    		);
    
    		// 3rd attribute buffer : normals
    		glEnableVertexAttribArray(2);
    		glBindBuffer(GL_ARRAY_BUFFER, normalbuffer);
    		glVertexAttribPointer(
    			2,                                // attribute
    			3,                                // size
    			GL_FLOAT,                         // type
    			GL_FALSE,                         // normalized?
    			0,                                // stride
    			(void*)0                          // array buffer offset
    		);
    
    		// Draw the triangles !
    		glDrawArrays(GL_TRIANGLES, 0, vertices.size() );
    
    		glDisableVertexAttribArray(0);
    		glDisableVertexAttribArray(1);
    		glDisableVertexAttribArray(2);
    
    		// Swap buffers
    		glfwSwapBuffers(window);
    		glfwPollEvents();
    
    	} // Check if the ESC key was pressed or the window was closed
    	while( glfwGetKey(window, GLFW_KEY_ESCAPE ) != GLFW_PRESS &&
    		   glfwWindowShouldClose(window) == 0 );
    
    	// Cleanup VBO and shader
    	glDeleteBuffers(1, &vertexbuffer);
    	glDeleteBuffers(1, &uvbuffer);
    	glDeleteBuffers(1, &normalbuffer);
    	glDeleteProgram(programID);
    	glDeleteTextures(1, &Texture);
    	glDeleteVertexArrays(1, &VertexArrayID);
    
    	// Close OpenGL window and terminate GLFW
    	glfwTerminate();
    
    	return 0;
    }
    
    
    
    #version 330 core
    
    // Input vertex data, different for all executions of this shader.
    layout(location = 0) in vec3 vertexPosition_modelspace;
    layout(location = 1) in vec2 vertexUV;
    layout(location = 2) in vec3 vertexNormal_modelspace;
    
    // Output data ; will be interpolated for each fragment.
    out vec2 UV;
    out vec3 Position_worldspace;
    out vec3 Normal_cameraspace;
    out vec3 EyeDirection_cameraspace;
    out vec3 LightDirection_cameraspace;
    
    // Values that stay constant for the whole mesh.
    uniform mat4 MVP;
    uniform mat4 V;
    uniform mat4 M;
    uniform vec3 LightPosition_worldspace;
    
    void main(){
    
    	// Output position of the vertex, in clip space : MVP * position
    	gl_Position =  MVP * vec4(vertexPosition_modelspace,1);
    	
    	// Position of the vertex, in worldspace : M * position
    	Position_worldspace = (M * vec4(vertexPosition_modelspace,1)).xyz;
    	
    	// Vector that goes from the vertex to the camera, in camera space.
    	// In camera space, the camera is at the origin (0,0,0).
    	vec3 vertexPosition_cameraspace = ( V * M * vec4(vertexPosition_modelspace,1)).xyz;
    	EyeDirection_cameraspace = vec3(0,0,0) - vertexPosition_cameraspace;
    
    	// Vector that goes from the vertex to the light, in camera space. M is omitted because it's the identity.
    	vec3 LightPosition_cameraspace = ( V * vec4(LightPosition_worldspace,1)).xyz;
    	LightDirection_cameraspace = LightPosition_cameraspace + EyeDirection_cameraspace;
    	
    	// Normal of the vertex, in camera space
    	Normal_cameraspace = ( V * M * vec4(vertexNormal_modelspace,0)).xyz; // Only correct if ModelMatrix does not scale the model ! Use its inverse transpose if not.
    	
    	// UV of the vertex. No special space for this one.
    	UV = vertexUV;
    }
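
    As the comment above notes, transforming the normal with V * M is only correct when the model matrix does not scale the model non-uniformly. A hedged sketch of the usual fix (not part of the tutorial's code) is to use the inverse transpose of the upper-left 3x3 of the model-view matrix:

    // Correct normal transform even when M contains non-uniform scaling
    mat3 NormalMatrix = transpose(inverse(mat3(V * M)));
    Normal_cameraspace = normalize(NormalMatrix * vertexNormal_modelspace);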
    
    
    #version 330 core
    
    // Interpolated values from the vertex shaders
    in vec2 UV;
    in vec3 Position_worldspace;
    in vec3 Normal_cameraspace;
    in vec3 EyeDirection_cameraspace;
    in vec3 LightDirection_cameraspace;
    
    // Output data
    out vec3 color;
    
    // Values that stay constant for the whole mesh.
    uniform sampler2D myTextureSampler;
    uniform mat4 MV;
    uniform vec3 LightPosition_worldspace;
    
    void main(){
    
    	// Light emission properties
    	// You probably want to put them as uniforms
    	vec3 LightColor = vec3(1,1,1);
    	float LightPower = 50.0f;
    	
    	// Material properties
    	vec3 MaterialDiffuseColor = texture( myTextureSampler, UV ).rgb;
    	vec3 MaterialAmbientColor = vec3(0.1,0.1,0.1) * MaterialDiffuseColor;
    	vec3 MaterialSpecularColor = vec3(0.3,0.3,0.3);
    
    	// Distance to the light
    	float distance = length( LightPosition_worldspace - Position_worldspace );
    
    	// Normal of the computed fragment, in camera space
    	vec3 n = normalize( Normal_cameraspace );
    	// Direction of the light (from the fragment to the light)
    	vec3 l = normalize( LightDirection_cameraspace );
    	// Cosine of the angle between the normal and the light direction, 
    	// clamped above 0
    	//  - light is at the vertical of the triangle -> 1
    	//  - light is perpendicular to the triangle -> 0
    	//  - light is behind the triangle -> 0
    	float cosTheta = clamp( dot( n,l ), 0,1 );
    	
    	// Eye vector (towards the camera)
    	vec3 E = normalize(EyeDirection_cameraspace);
    	// Direction in which the triangle reflects the light
    	vec3 R = reflect(-l,n);
    	// Cosine of the angle between the Eye vector and the Reflect vector,
    	// clamped to 0
    	//  - Looking into the reflection -> 1
    	//  - Looking elsewhere -> < 1
    	float cosAlpha = clamp( dot( E,R ), 0,1 );
    	
    	color = 
    		// Ambient : simulates indirect lighting
    		MaterialAmbientColor +
    		// Diffuse : "color" of the object
    		MaterialDiffuseColor * LightColor * LightPower * cosTheta / (distance*distance) +
    		// Specular : reflective highlight, like a mirror
    		MaterialSpecularColor * LightColor * LightPower * pow(cosAlpha,5) / (distance*distance);
    
    }
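
    The hard-coded exponent in pow(cosAlpha,5) controls how tight the specular highlight is. A common variation, not part of this tutorial's code, is to expose it as a uniform (Shininess here is a hypothetical name) so it can be tuned per material:

    uniform float Shininess; // e.g. 5.0; larger values give a smaller, sharper highlight

    // Specular term with a tunable exponent instead of the hard-coded 5
    vec3 specular = MaterialSpecularColor * LightColor * LightPower
                    * pow(cosAlpha, Shininess) / (distance * distance);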
    

    Image

    The rendered result after adjusting the ambient light.
