Custom shader not working on Android

I have ported the planetary shader from Shadertoy into Unity as an image effect attached to the camera. It works fine in the editor and in a standalone Windows build, but it doesn't work on Android devices: the screen only flashes blue and black images.

This is how it looks in the Unity Editor and in a Windows build:

[GIF: 6Ihy6i0.gif]

This is how it looks on Android:

[GIF: Xice8RC.gif]

Ported shader code:

Shader "Hidden/Plasma Space Ball Image Effect"
{
    Properties
    {
        iChannel0("iChannel0", 2D) = "white" {}
        //[MaterialToggle] _isToggled("isToggle", Float) = 0
    }
    SubShader
    {
        // No culling or depth
        Cull Off ZWrite Off ZTest Always

        Pass
        {
            CGPROGRAM
            #pragma vertex vert
            #pragma fragment frag

            #include "UnityCG.cginc"

            struct appdata
            {
                float4 vertex : POSITION;
                float2 uv : TEXCOORD0;
            };

            struct v2f
            {
                float2 uv : TEXCOORD0;
                float4 vertex : SV_POSITION;
            };

            v2f vert(appdata v)
            {
                v2f o;
                o.vertex = UnityObjectToClipPos(v.vertex);
                o.uv = v.uv;
                return o;
            }

            sampler2D iChannel0;

            //Ported from https://www.shadertoy.com/view/MstXzf

            float3 hb(float2 pos, float t, float time, float2 rot, float size, sampler2D tex0)
            {
                float2 newUv = 0.2*(pos / (1.2 - t) + 0.5*time*rot);
                //float texSample = texture(tex0, newUv).b;
                float texSample = tex2D(tex0, newUv).b;
                float uOff = 0.2*(texSample + 0.3*time);     //lsf3RH
                float2 starUV = newUv + float2(uOff, 0.0);
                //return float3(0.3, 0.3, 1.0) + 1.3*texture(tex0, starUV).b;
                return float3(0.3, 0.3, 1.0) + 1.3*tex2D(tex0, starUV).b;
            }

            float4 blob(float2 uv, float size, float time, sampler2D tex0)
            {
                float2 center = float2(0., 0.);

                float2 pos = center - uv;
                float t = length(pos);
                float st = size - t;

                float2 rot = 0.005*float2(sin(time / 16.), sin(time / 12.)); //MslGWN

                float alpha = smoothstep(0.0, 0.2*size, st);

                float3 col = hb(pos, t, time, rot, size, tex0);
                float a1 = smoothstep(-1.4, -1.0, -col.b);
                col = lerp(col, hb(pos, t, -time, -rot, size, tex0), a1);

                col += 0.8*exp(-12.*abs(t - 0.8*size) / size);
                float a2 = smoothstep(-1.4, -1.0, -col.b);

                alpha -= a2;

                //float crosshair = float((abs(pos.x) < 0.005 && abs(pos.y) < 0.15) || (abs(pos.y) < 0.005&&abs(pos.x) < 0.15));
                //return float4(col, alpha) + crosshair;

                return float4(col, alpha);
            }

            float4 main_(float2 uv, float size)
            {
                return blob(uv, size, _Time.y, iChannel0);
            }


            fixed4 frag(v2f i) : SV_Target
            {
                float4 fragColor = 0;
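                // i.vertex was declared SV_POSITION; it is used here as Shadertoy's fragCoord (pixel coordinates)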
                float2 fragCoord = i.vertex.xy;

                ///---------------------------------------------------

                float2 uv = fragCoord.xy / _ScreenParams.xy;
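                // center uv to [-1, 1] and correct for the aspect ratio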
                float2 cr = uv*2. - 1.;
                cr.x *= _ScreenParams.x / _ScreenParams.y;

                //late addition to elaborate background motion, could be reused later on
                float2 rot = 0.5*float2(sin(_Time.y / 16.), sin(_Time.y / 12.));

                float4 ball = clamp(main_(cr, sin(_Time.y)*0.05 + 0.5 + 0.5), 0., 1.);
                //float3 bg = float3(0.7, 0.7, 1.0)*texture(iChannel0, uv + rot + 0.1*ball.rb).b;
                float3 bg = float3(0.7, 0.7, 1.0)*tex2D(iChannel0, uv + rot + 0.1*ball.rb).b;

                //simulated gl blend
                fragColor = float4(lerp(bg, ball.rgb, ball.a), 1.0);
                //fragColor = lerp(fragColor,tex2D(iChannel0, i.uv).rgba,.5);
                return fragColor;
            }
            ENDCG
        }
    }
}

You can find the image used for the iChannel0 input slot of the shader above here.

Things I've tried:

  • Adding the shader to the Graphics settings so that Unity will include it during the build process.
  • Disabling Auto Graphics API and trying OpenGLES2 and OpenGLES3.
  • Checking the log using Android Studio. No errors / warnings.

None of these solved the problem, and I don't know what else to try.

Software and device information, in case it helps:

  • Unity 5.6.0f3
  • Android 4.4.2

This is for teaching and learning purposes, as I am studying the GLSL, HLSL, and Cg/ShaderLab shading languages. I just want to know why the ported shader doesn't work as expected on Android devices.

Why does Android display blue and black images?

1 answer


You need to use the VPOS semantic to get the pixel position in the fragment shader on OpenGLES2.
From the Unity docs:

A fragment shader can receive the position of the pixel being rendered as a special VPOS semantic. This feature only exists starting with shader model 3.0, so the shader needs to have the #pragma target 3.0 compilation directive.
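
For the shader in the question, that means adding the target directive to the CGPROGRAM block, along these lines (only the directives change; the rest of the pass stays as it is):

CGPROGRAM
#pragma vertex vert
#pragma fragment frag
#pragma target 3.0 // required for the VPOS semantic
// ... rest of the pass unchanged ...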

So, to get screen-space positions:

// note: no SV_POSITION in this struct
struct v2f {
    float2 uv : TEXCOORD0;
};

v2f vert (
    float4 vertex : POSITION, // vertex position input
    float2 uv : TEXCOORD0, // texture coordinate input
    out float4 outpos : SV_POSITION // clip space position output
    )
{
    v2f o;
    o.uv = uv;
    outpos = UnityObjectToClipPos(vertex);
    return o;
}

fixed4 frag (v2f i, UNITY_VPOS_TYPE screenPos : VPOS) : SV_Target
{
    // screenPos.xy will contain pixel integer coordinates.
    float4 fragColor = 0;
    float2 fragCoord = screenPos.xy;
    // ... rest of the original frag body ...
}

But you are already passing the UVs through to the fragment shader, so maybe you can just use those?

float2 uv = i.uv;

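For example, the start of the original frag could then be reduced to something like this (a sketch; the rest of the body stays the same):

fixed4 frag (v2f i) : SV_Target
{
    float4 fragColor = 0;

    // for a full-screen image effect the interpolated uv already spans
    // [0, 1] across the screen, so there is no need to divide the pixel
    // position by _ScreenParams
    float2 uv = i.uv;
    float2 cr = uv * 2. - 1.;
    cr.x *= _ScreenParams.x / _ScreenParams.y;
    // ... rest of the original frag body ...
}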

It turns out I was wrong: you don't get the clip-space position in the fragment shader on OpenGLES2, you get 0. (Maybe someone can explain this?)

I made a small test shader:

CGPROGRAM
#pragma vertex vert
#pragma fragment frag

float4 vert (float4 vertex : POSITION) : SV_Position
{                   
    return UnityObjectToClipPos(vertex);                
}

fixed4 frag (float4 screenPos : SV_Position) : SV_Target
{   
    float uvx = screenPos.x/_ScreenParams.x;
    return float4(uvx, 0., 0., 1.);
}
ENDCG

and the line float uvx = screenPos.x/_ScreenParams.x; compiles (as shown by Unity's "Compile and show code" output) as

tmpvar_2.x = (0.0 / _ScreenParams.x); // OpenGLES2

u_xlat0 = gl_FragCoord.x / _ScreenParams.x; // OpenGLES3


But if you use the VPOS semantic instead,

fixed4 frag (float4 screenPos : VPOS) : SV_Target

the same line compiles as

tmpvar_2.x = (gl_FragCoord.x / _ScreenParams.x); // OpenGLES2

u_xlat0 = gl_FragCoord.x / _ScreenParams.x; // OpenGLES3


So, for OpenGLES2, it looks like you need the VPOS semantic to get screen-space positions in the fragment shader.
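
Applied to the shader in the question, the fragment entry point would look roughly like this (a sketch; note that v2f must no longer contain an SV_POSITION member, as in the vertex shader example above, and the pass needs #pragma target 3.0):

fixed4 frag (v2f i, UNITY_VPOS_TYPE screenPos : VPOS) : SV_Target
{
    float4 fragColor = 0;
    // VPOS delivers pixel coordinates on OpenGLES2 as well
    float2 fragCoord = screenPos.xy;

    float2 uv = fragCoord / _ScreenParams.xy;
    float2 cr = uv * 2. - 1.;
    cr.x *= _ScreenParams.x / _ScreenParams.y;
    // ... rest of the original frag body unchanged ...
}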
