OSX EGL Initialization Issues

This topic contains 3 replies, has 3 voices, and was last updated by  qiankanglai 2 years, 9 months ago.

Viewing 4 posts - 1 through 4 (of 4 total)
  • Author
    Posts
  • #31855

    ROm
    Member

    As the title says, I'm having a few issues. The first is that, regardless of the way I'm initializing the EGL context, the configuration attributes that I'm passing are always ignored… In a previous version this was working fine… I'm initializing the context like this:

    	EGLDisplay	m_EGLDisplay;
    EGLSurface m_EGLWindow;
    EGLContext m_EGLContext;

    EGLNativeDisplayType m_NDT;
    EGLNativePixmapType m_NPT;
    EGLNativeWindowType m_NWT;

    m_NDT = (EGLNativeDisplayType)NULL;
    m_NPT = (EGLNativePixmapType)NULL;
    m_NWT = (EGLNativeWindowType)window->ns.view;

    EGLConfig config[ 16 ];

    EGLint attribs[] = { EGL_CONTEXT_CLIENT_VERSION,
    3/* or 2 */,
    EGL_NONE },

    config_attr[] = { EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
    EGL_RED_SIZE , 5,
    EGL_GREEN_SIZE , 6,
    EGL_BLUE_SIZE , 5,
    EGL_ALPHA_SIZE , 0,
    EGL_DEPTH_SIZE , 24,
    EGL_STENCIL_SIZE , 8,
    EGL_SAMPLE_BUFFERS , 0,
    EGL_SAMPLES , 0,
    EGL_SURFACE_TYPE , EGL_WINDOW_BIT,
    EGL_NONE },
    version_major,
    version_minor,
    num_config;

    m_EGLDisplay = eglGetDisplay( m_NDT );

    eglInitialize( m_EGLDisplay,
    &version_major,
    &version_minor );

    eglBindAPI( EGL_OPENGL_ES_API );

    fprintf( stderr, "EGL_VENDOR: %sn" , ( char * )eglQueryString( m_EGLDisplay, EGL_VENDOR ) );
    fprintf( stderr, "EGL_VERSION: %sn" , ( char * )eglQueryString( m_EGLDisplay, EGL_VERSION ) );
    fprintf( stderr, "EGL_CLIENT_APIS: %sn", ( char * )eglQueryString( m_EGLDisplay, EGL_CLIENT_APIS ) );

    EGLint i = 0,
    id,
    size,
    red,
    green,
    blue,
    alpha,
    depth,
    stencil,
    sample_buffers,
    samples;

    eglGetConfigs( m_EGLDisplay,
    config,
    16,
    &num_config );

    while( i != num_config )
    {
    eglGetConfigAttrib( m_EGLDisplay, config[ i ], EGL_CONFIG_ID , &id );
    eglGetConfigAttrib( m_EGLDisplay, config[ i ], EGL_BUFFER_SIZE , &size );
    eglGetConfigAttrib( m_EGLDisplay, config[ i ], EGL_RED_SIZE , &red );
    eglGetConfigAttrib( m_EGLDisplay, config[ i ], EGL_GREEN_SIZE , &green );
    eglGetConfigAttrib( m_EGLDisplay, config[ i ], EGL_BLUE_SIZE , &blue );
    eglGetConfigAttrib( m_EGLDisplay, config[ i ], EGL_ALPHA_SIZE , &alpha );
    eglGetConfigAttrib( m_EGLDisplay, config[ i ], EGL_DEPTH_SIZE , &depth );
    eglGetConfigAttrib( m_EGLDisplay, config[ i ], EGL_STENCIL_SIZE , &stencil );
    eglGetConfigAttrib( m_EGLDisplay, config[ i ], EGL_SAMPLE_BUFFERS, &sample_buffers );
    eglGetConfigAttrib( m_EGLDisplay, config[ i ], EGL_SAMPLES , &samples );

    fprintf( stderr,
    "EGL_CONFIG%d: Bits:%d R:%d G:%d B:%d A:%d D:%d St:%d Sb:%d Sa:%dn",
    id,
    size,
    red,
    green,
    blue,
    alpha,
    depth,
    stencil,
    sample_buffers,
    samples );
    ++i;
    }

    fprintf( stderr, "EGL_EXTENSIONS: %snn" , ( char * )eglQueryString( m_EGLDisplay, EGL_EXTENSIONS ) );


    eglChooseConfig( m_EGLDisplay,
    config_attr,
    &config[ 0 ],
    1,
    &num_config );

    m_EGLWindow = eglCreateWindowSurface( m_EGLDisplay,
    config[ 0 ],
    m_NWT,
    NULL );

    m_EGLContext = eglCreateContext( m_EGLDisplay,
    config[ 0 ],
    EGL_NO_CONTEXT,
    attribs );

    eglMakeCurrent( m_EGLDisplay,
    m_EGLWindow,
    m_EGLWindow,
    m_EGLContext );

    The EGL_CONFIGs that are returned are always the same, which explains why I'm always getting a 32-bit context with multisampling:

    EGL_CONFIG1: Bits:32 R:8 G:8 B:8 A:8 D:24 St:8 Sb:1 Sa:4
    EGL_CONFIG2: Bits:32 R:8 G:8 B:8 A:8 D:24 St:8 Sb:1 Sa:4
    EGL_CONFIG3: Bits:32 R:8 G:8 B:8 A:8 D:24 St:8 Sb:1 Sa:4
    EGL_CONFIG4: Bits:32 R:8 G:8 B:8 A:8 D:24 St:8 Sb:1 Sa:4
    EGL_CONFIG5: Bits:32 R:8 G:8 B:8 A:8 D:24 St:8 Sb:1 Sa:4
    EGL_CONFIG6: Bits:32 R:8 G:8 B:8 A:8 D:24 St:8 Sb:1 Sa:4

    1.
    Is there a workaround to fix this, or at least a way to disable multisampling (it slows things down a lot in my case)?

    2.
    The second problem I'm having is that my MBP has 2 video cards: 1 Nvidia (which PVRVFrame picks up and works fine on) and an Intel HD… For the Intel card, PVRVFrame fails to pick it up and I'm falling back to the software renderer… (plus notice below that in both cases the GL_VERSION query fails…)

    When I select the Nvidia card:

    GL_VENDOR: Imagination Technologies (Host: NVIDIA Corporation)
    GL_RENDERER: PVRVFrame 10.0 - None (Host : NVIDIA GeForce GT 330M OpenGL Engine) (SDK Build: 3.4@3186613)
    GL_VERSION: (Host : 3.3 NVIDIA-8.24.16 310.90.9.05f01)
    GL_SHADING_LANGUAGE_VERSION: OpenGL ES GLSL ES 1.00 (Host: 3.30)
    GL_EXTENSIONS: GL_APPLE_copy_texture_levels GL_APPLE_sync GL_APPLE_texture_max_level GL_EXT_blend_minmax GL_EXT_color_buffer_float GL_EXT_debug_marker GL_EXT_discard_framebuffer GL_EXT_draw_buffers GL_EXT_multi_draw_arrays GL_EXT_multisampled_render_to_texture GL_EXT_occlusion_query_boolean GL_EXT_robustness GL_EXT_shader_texture_lod GL_EXT_sRGB GL_EXT_texture_filter_anisotropic GL_EXT_texture_rg GL_EXT_texture_storage GL_EXT_texture_sRGB_decode GL_EXT_texture_type_2_10_10_10_REV GL_IMG_multisampled_render_to_texture GL_IMG_program_binary GL_IMG_read_format GL_IMG_shader_binary GL_IMG_texture_compression_pvrtc GL_IMG_texture_compression_pvrtc2 GL_IMG_texture_npot GL_IMG_texture_stream GL_IMG_texture_stream2 GL_IMG_uniform_buffer_object GL_IMG_vertex_array_object GL_KHR_debug GL_KHR_blend_equation_advanced GL_OES_compressed_ETC1_RGB8_texture GL_OES_depth_texture GL_OES_depth_texture_cube_map GL_OES_EGL_image_external GL_OES_egl_sync GL_OES_element_index_uint GL_OES_fragment_precision_high GL_OES_get_program_binary GL_OES_mapbuffer GL_OES_packed_depth_stencil GL_OES_read_format GL_OES_required_internalformat GL_OES_sample_shading GL_OES_sample_variables GL_OES_shader_image_atomic GL_OES_shader_multisample_interpolation GL_OES_standard_derivatives GL_OES_stencil_wrap GL_OES_surfaceless_context GL_OES_texture_mirrored_repeat GL_OES_texture_stencil8 GL_OES_texture_storage_multisample_2d_array GL_OES_vertex_array_object

    When I select the Intel card using gfxCardStatus:

    GL_VENDOR: Imagination Technologies (Host: Apple Computer, Inc.)
    GL_RENDERER: PVRVFrame 10.0 - None (Host : Apple Software Renderer) (SDK Build: 3.4@3186613)
    GL_VERSION: (Host : 4.1 APPLE-9.6.1)
    GL_SHADING_LANGUAGE_VERSION: OpenGL ES GLSL ES 1.00 (Host: 4.10)
    GL_EXTENSIONS: GL_APPLE_copy_texture_levels GL_APPLE_sync GL_APPLE_texture_max_level GL_EXT_blend_minmax GL_EXT_color_buffer_float GL_EXT_debug_marker GL_EXT_discard_framebuffer GL_EXT_draw_buffers GL_EXT_multi_draw_arrays GL_EXT_multisampled_render_to_texture GL_EXT_occlusion_query_boolean GL_EXT_robustness GL_EXT_shader_texture_lod GL_EXT_sRGB GL_EXT_texture_filter_anisotropic GL_EXT_texture_rg GL_EXT_texture_storage GL_EXT_texture_sRGB_decode GL_EXT_texture_type_2_10_10_10_REV GL_IMG_multisampled_render_to_texture GL_IMG_program_binary GL_IMG_read_format GL_IMG_shader_binary GL_IMG_texture_compression_pvrtc GL_IMG_texture_compression_pvrtc2 GL_IMG_texture_npot GL_IMG_texture_stream GL_IMG_texture_stream2 GL_IMG_uniform_buffer_object GL_IMG_vertex_array_object GL_KHR_debug GL_KHR_blend_equation_advanced GL_OES_compressed_ETC1_RGB8_texture GL_OES_depth_texture GL_OES_depth_texture_cube_map GL_OES_EGL_image_external GL_OES_egl_sync GL_OES_element_index_uint GL_OES_fragment_precision_high GL_OES_get_program_binary GL_OES_mapbuffer GL_OES_packed_depth_stencil GL_OES_read_format GL_OES_required_internalformat GL_OES_sample_shading GL_OES_sample_variables GL_OES_shader_image_atomic GL_OES_shader_multisample_interpolation GL_OES_standard_derivatives GL_OES_stencil_wrap GL_OES_surfaceless_context GL_OES_texture_mirrored_repeat GL_OES_texture_stencil8 GL_OES_texture_storage_multisample_2d_array GL_OES_vertex_array_object

    3.
    Im also receiving random crash when building/restarting my App (on both card btw):

    /Users/autobuild/buildxl/buildroot/sdk/branch/UtilitiesSrc/Common/PVRPreferences/PVRPreferences.cpp
    WARNING: No declaration found at the start. The declaration will be recreated.

    4.
    In addition, changing the client version bit has no effect:

    	EGLint attribs[] = { EGL_CONTEXT_CLIENT_VERSION,
    2,
    EGL_NONE }

    	EGLint attribs[] = { EGL_CONTEXT_CLIENT_VERSION,
    3,
    EGL_NONE }

    I was hoping that changing 2 to 3, or vice versa, would give me a correct GL_VERSION and GL_SHADING_LANGUAGE_VERSION… It is crucial in my App, as I need to adjust the GLSL code generation and the functionality / function extensions in my App…

    Thanks in advance for your reply… I hope there's some workaround for these issues, or that they can be fixed soon…

    #39085

    chris
    Moderator

    Hi ROm, thanks for reporting these issues

    1. As you’ve seen, there’s currently a nasty bug in eglChooseConfig where only the first matching config is returned. A temporary workaround would be to use eglGetConfigs to return a list of all available configs and select an appropriate one manually.

    2. We’re currently working on better support for intel graphics. For the time being you are probably best off sticking with the nvidia card. There’s another bug in glGetString where GL_VERSION fails for anything above ES 2.0.

    These two bugs will be fixed in the next release, which will be early next year, but we should be releasing beta versions around December.

    3. I haven’t seen this crash before, but I’ll file a bug and try to reproduce it. I assume you mean this only happens when restarting the app in Xcode?

    4. The version of context returned by eglCreateContext also depends on the value of EGL_RENDERABLE_TYPE in the selected EGL config. In PVRVFrame you’ll always be getting one of a set of hard-coded EGL configs, and all ES2_BIT configs also support ES3_BIT_KHR, meaning you’ll always get an ES 3.0 context. I realise this isn’t really ideal so I’ll file a bug report to add a better selection of configs. This should also allow you to select a non-multisampled config too.

    Again I’ll do my best to push these fixes in for the beta around December time. I hope these issues don’t slow you down too much in the meantime!

    #39086

    ROm
    Member

    1. I thought about it but… as you can see in my code output, when I enumerate the configs they are all the same… 😉

    2. It would be great if we could select whether to initialize GLES2 or GLES3; that makes it a lot easier to test our apps using a specific profile. I personally really need that, since I'm developing an editor and selecting the profile for the project is crucial 😉 Don't use EGL_OPENGL_ES3_BIT_KHR — it's basically an extension. Simply use the standard EGL_OPENGL_ES2_BIT, and toggle GLES2/GLES3 context creation with EGL_CONTEXT_CLIENT_VERSION. I believe that makes more sense… (see the answer to #4 for more info…)

    3. Yes, it is using Xcode. It typically happens when my app is running and I then hit build and run: the app closes, then the newly compiled version launches and can't create the context. Another thing that happens is that sometimes it gets the context (as I can see on the console it's initialized) but the app just hangs there (at eglMakeCurrent)… I have to restart again to start it up… Not sure it's related, but…

    4. Hmmm, the way I believe it should be done (my $0.02) is like this: when EGL_CONTEXT_CLIENT_VERSION is set to 2, create a compatible OpenGL 2.1 context — basically NSOpenGLProfileVersionLegacy. And when ES3 is selected, initialize it internally with NSOpenGLProfileVersion3_2Core and up… On Linux and Windows it's basically the same thing: glXCreateContextAttribsARB with GLX_CONTEXT_CORE_PROFILE_BIT_ARB, and wglCreateContextAttribsARB with WGL_CONTEXT_CORE_PROFILE_BIT_ARB, or otherwise the latest version supported by the driver.

    #39087

    I’m experiencing the same issue 3 here. It seems the library cannot find the XML file during initialization, and it crashes.

Viewing 4 posts - 1 through 4 (of 4 total)
You must be logged in to reply to this topic.