Texture2D diffuseTexture : register(t0, space2);
SamplerState smp : register(s0, space2);

struct PSInput
{
    float4 position : SV_POSITION;
    float2 texcoord : TEXCOORD0;
};

// 2x2 ordered-dither (Bayer) pattern. Must be static const so the initializer
// is honored at compile time; a plain global would be treated as an
// uninitialized constant-buffer variable.
static const float ditherPattern2x2[4] = { 0.0, 0.5, 0.75, 0.25 };

float4 main(PSInput input) : SV_TARGET
{
    // Sample the texture. Nearest-neighbor filtering comes from the sampler
    // state bound to smp, not from this call.
    float4 color = diffuseTexture.Sample(smp, input.texcoord);

    // Optional: affine distortion effect.
    // To simulate affine (non-perspective-correct) texture warping you could
    // manipulate the texcoords here using input.position.z or w, but the
    // vertex-shader side (see the sketch after this function) is usually
    // enough to fake "no perspective", so this shader leaves them untouched.

    // Dithering (optional):
    // Derive a screen-space pixel coordinate from SV_POSITION and look up
    // the 2x2 dither pattern.
    int x = (int)input.position.x;
    int y = (int)input.position.y;
    int idx = (y & 1) * 2 + (x & 1);
    float dither = ditherPattern2x2[idx];

    // PS1-style color quantization (e.g. 5 bits per channel = 32 levels).
    // colorBitDepth.x is used for R/G/B, colorBitDepth.y for alpha.
    float3 colorBitDepth = float3(5.0, 5.0, 5.0);
    float stepsRGB = pow(2.0, colorBitDepth.x);
    float stepsA = pow(2.0, colorBitDepth.y);

    // Add dithering before quantization to reduce banding.
    // One quantization step is a reasonable dither amplitude; tweak to taste.
    float ditherStrength = 1.0 / stepsRGB;
    float4 colorDithered = color + ditherStrength * dither;

    // Clamp after dithering.
    colorDithered = saturate(colorDithered);

    // Quantize to the reduced bit depth.
    float3 quantizedRGB = round(colorDithered.rgb * (stepsRGB - 1.0)) / (stepsRGB - 1.0);
    float quantizedA = round(colorDithered.a * (stepsA - 1.0)) / (stepsA - 1.0);

    float4 finalColor = float4(quantizedRGB, quantizedA);
    return finalColor;
}
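
// --- Vertex-shader sketch (not part of the original listing; an assumption) ---
// A minimal companion vertex shader illustrating the two tricks the comments
// above allude to: snapping vertices to a coarse grid ("PS1 wobble") and
// collapsing the perspective divide so attribute interpolation becomes affine
// in screen space. The cbuffer layout, register/space choices, struct names,
// and targetResolution value are all hypothetical and would need to match
// your pipeline.

cbuffer PerObject : register(b0, space1)   // register/space chosen arbitrarily
{
    float4x4 worldViewProj;
    float2   targetResolution;             // e.g. float2(320.0, 240.0)
};

struct VSInput
{
    float3 position : POSITION;
    float2 texcoord : TEXCOORD0;
};

struct VSOutput
{
    float4 position : SV_POSITION;
    float2 texcoord : TEXCOORD0;           // matches PSInput above
};

VSOutput mainVS(VSInput input)
{
    VSOutput output;

    // Matrix multiply order assumes column-vector convention; swap the
    // operands if your matrices are packed for row vectors.
    float4 clipPos = mul(worldViewProj, float4(input.position, 1.0));

    // Vertex snapping: quantize NDC x/y to the virtual low-res grid.
    float2 ndc = clipPos.xy / clipPos.w;
    ndc = floor(ndc * targetResolution * 0.5) / (targetResolution * 0.5);
    clipPos.xy = ndc * clipPos.w;

    // Affine trick: pre-divide by w so the rasterizer's perspective
    // correction cancels out and texcoords interpolate linearly in screen
    // space. This also flattens depth and hurts near-plane clipping, so
    // treat it as an optional stylistic hack.
    clipPos /= clipPos.w;

    output.position = clipPos;
    output.texcoord = input.texcoord;
    return output;
}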