You have to use glReadPixels. It used to be easier to implement by simply calling it from C# in the OnPostRender function, but you can't do that anymore. You now have to use GL.IssuePluginEvent to call the native function that takes the screenshot.
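In isolation, the C# side of that pattern looks roughly like this (a minimal sketch only; "MyPlugin", GetRenderEventFunc and PluginEventExample are placeholder names, and the full working version is further down in this answer):

using System;
using System.Runtime.InteropServices;
using UnityEngine;

public class PluginEventExample : MonoBehaviour
{
    //Returns a function pointer to the native rendering callback exported by the plugin
    [DllImport("MyPlugin")]
    private static extern IntPtr GetRenderEventFunc();

    void Update()
    {
        //Asks Unity to invoke the native callback on the render thread,
        //where the graphics context is current and glReadPixels is legal to call
        GL.IssuePluginEvent(GetRenderEventFunc(), 1);
    }
}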
You also need the Unity C++ API headers (IUnityInterface.h and IUnityGraphics.h), which are located at <UnityInstallationDirectory>\Editor\Data\PluginAPI.

I created a folder called UnityPluginHeaders and put both the IUnityInterface.h and IUnityGraphics.h header files inside it so that they can be imported with #include "UnityPluginHeaders/IUnityInterface.h" and #include "UnityPluginHeaders/IUnityGraphics.h".
C++ (ScreenPointPixel.h):
#ifndef ANDROIDSCREENSHOT_NATIVE_LIB_H
#define ANDROIDSCREENSHOT_NATIVE_LIB_H

#define DLLExport __declspec(dllexport)

extern "C"
{
#if defined(WIN32) || defined(_WIN32) || defined(__WIN32__) || defined(_WIN64) || defined(WINAPI_FAMILY)
    DLLExport void initScreenPointPixel(void* buffer, int x, int y, int width, int height);
    DLLExport void updateScreenPointPixelBufferPointer(void* buffer);
    DLLExport void updateScreenPointPixelCoordinate(int x, int y);
    DLLExport void updateScreenPointPixelSize(int width, int height);
    int GetScreenPixels(void* buffer, int x, int y, int width, int height);
#else
    void initScreenPointPixel(void* buffer, int x, int y, int width, int height);
    void updateScreenPointPixelBufferPointer(void* buffer);
    void updateScreenPointPixelCoordinate(int x, int y);
    void updateScreenPointPixelSize(int width, int height);
    int GetScreenPixels(void* buffer, int x, int y, int width, int height);
#endif
}
#endif //ANDROIDSCREENSHOT_NATIVE_LIB_H
C++ (ScreenPointPixel.cpp):
#include "ScreenPointPixel.h"
#include <string>
#include <stdlib.h>
//For Debugging
//#include "DebugCPP.h"
//http://stackoverflow.com/questions/43732825/use-debug-log-from-c/43735531#43735531
//Unity Headers
#include "UnityPluginHeaders/IUnityInterface.h"
#include "UnityPluginHeaders/IUnityGraphics.h"
//Headers for Windows
#if defined(WIN32) || defined(_WIN32) || defined(__WIN32__) || defined(_WIN64) || defined(WINAPI_FAMILY)
#include <windows.h>
#include <gl/GL.h>
#include <gl/GLU.h>
#include <stdlib.h>
#include "glext.h"
#pragma comment(lib, "opengl32.lib")
//--------------------------------------------------
//Headers for Android
#elif defined(ANDROID) || defined(__ANDROID__)
#include <jni.h>
#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>
//Remember to link GLESv2 when building for Android, e.g. in Android.mk:
//LOCAL_LDLIBS += -lGLESv2
//--------------------------------------------------
//Headers for MAC and iOS
//http://nadeausoftware.com/articles/2012/01/c_c_tip_how_use_compiler_predefined_macros_detect_operating_system
#elif defined(__APPLE__) && defined(__MACH__)
//Apple OSX and iOS (Darwin)
#include <TargetConditionals.h>
#if TARGET_IPHONE_SIMULATOR == 1
//iOS in Xcode simulator
#include <OpenGLES/ES2/gl.h>
#include <OpenGLES/ES2/glext.h>
#elif TARGET_OS_IPHONE == 1
//iOS on iPhone, iPad, etc.
#include <OpenGLES/ES2/gl.h>
#include <OpenGLES/ES2/glext.h>
#elif TARGET_OS_MAC == 1
#include <OpenGL/gl.h>
#include <OpenGL/glu.h>
#include <GLUT/glut.h>
#endif
//--------------------------------------------------
//Headers for Linux
#elif defined(__linux__)
#include <GL/gl.h>
#include <GL/glu.h>
#endif
static void* screenPointPixelData = nullptr;
static int _x;
static int _y;
static int _width;
static int _height;
//----------------------------Enable Screenshot-----------------------------
void initScreenPointPixel(void* buffer, int x, int y, int width, int height) {
    screenPointPixelData = buffer;
    _x = x;
    _y = y;
    _width = width;
    _height = height;
}

void updateScreenPointPixelBufferPointer(void* buffer) {
    screenPointPixelData = buffer;
}

void updateScreenPointPixelCoordinate(int x, int y) {
    _x = x;
    _y = y;
}

void updateScreenPointPixelSize(int width, int height) {
    _width = width;
    _height = height;
}

int GetScreenPixels(void* buffer, int x, int y, int width, int height) {
    if (glGetError())
        return -1;

    //glReadPixels(x, y, width, height, GL_BGRA, GL_UNSIGNED_INT_8_8_8_8_REV, buffer);
    glReadPixels(x, y, width, height, GL_RGBA, GL_UNSIGNED_BYTE, buffer);

    if (glGetError())
        return -2;

    return 0;
}
//----------------------------UNITY RENDERING CALLBACK-----------------------------
// Plugin function to handle a specific rendering event
static void UNITY_INTERFACE_API OnRenderEventScreenPointPixel(int eventID)
{
    //Put rendering code below
    if (screenPointPixelData == nullptr) {
        //Debug::Log("Pointer is null", Color::Red);
        return;
    }

    int result = GetScreenPixels(screenPointPixelData, _x, _y, _width, _height);
    //std::string log_msg = "Cobol " + std::to_string(result);
    //Debug::Log(log_msg, Color::Green);
}

// Freely defined function to pass a callback to plugin-specific scripts
extern "C" UnityRenderingEvent UNITY_INTERFACE_EXPORT UNITY_INTERFACE_API
GetRenderEventScreenPointPixelFunc()
{
    return OnRenderEventScreenPointPixel;
}
When compiled/built from Android Studio, it should give you two folders (armeabi-v7a and x86) in the <ProjectDirectory>\app\build\intermediates\cmake\release\obj directory. They should both contain the shared *.so library. If you can't compile this with Android Studio, then use the copy of the Android Studio project I made for this here. You can use it to generate the shared *.so library.
Place both folders in your Unity project at Assets\Plugins\Android\libs.

You should now have:

Assets\Plugins\Android\libs\armeabi-v7a\libScreenPointPixel-lib.so

and

Assets\Plugins\Android\libs\x86\libScreenPointPixel-lib.so
C# Test code:

Create a small, simple RawImage component and position it at the top-right of the screen. Drag that RawImage into the rawImageColor slot of the script below. When you click anywhere on the screen, the pixel color at that screen point should be shown on the rawImageColor RawImage.
C#:
using System;
using System.Collections;
using System.Runtime.InteropServices;
using UnityEngine;
using UnityEngine.UI;
public class ScreenPointPixel : MonoBehaviour
{
[DllImport("ScreenPointPixel-lib", CallingConvention = CallingConvention.Cdecl)]
public static extern void initScreenPointPixel(IntPtr buffer, int x, int y, int width, int height);
//-------------------------------------------------------------------------------------
[DllImport("ScreenPointPixel-lib", CallingConvention = CallingConvention.Cdecl)]
public static extern void updateScreenPointPixelBufferPointer(IntPtr buffer);
//-------------------------------------------------------------------------------------
[DllImport("ScreenPointPixel-lib", CallingConvention = CallingConvention.Cdecl)]
public static extern void updateScreenPointPixelCoordinate(int x, int y);
//-------------------------------------------------------------------------------------
[DllImport("ScreenPointPixel-lib", CallingConvention = CallingConvention.Cdecl)]
public static extern void updateScreenPointPixelSize(int width, int height);
//-------------------------------------------------------------------------------------
//-------------------------------------------------------------------------------------
[DllImport("ScreenPointPixel-lib", CallingConvention = CallingConvention.StdCall)]
private static extern IntPtr GetRenderEventScreenPointPixelFunc();
//-------------------------------------------------------------------------------------
int width = 500;
int height = 500;
//Where Pixel data will be saved
byte[] screenData;
//Where handle that pins the Pixel data will stay
GCHandle pinHandler;
//Used to test the color
public RawImage rawImageColor;
// Use this for initialization
void Awake()
{
Resolution res = Screen.currentResolution;
width = res.width;
height = res.height;
//Allocate array to be used
screenData = new byte[width * height * 4];
//Pin the Array so that it doesn't move around
pinHandler = GCHandle.Alloc(screenData, GCHandleType.Pinned);
//Register the screenshot and pass the array that will receive the pixels
IntPtr arrayPtr = pinHandler.AddrOfPinnedObject();
initScreenPointPixel(arrayPtr, 0, 0, width, height);
StartCoroutine(caller());
}
IEnumerator caller()
{
while (true)
{
//Use mouse position as the pixel position
//Input.tou
#if UNITY_ANDROID || UNITY_IOS || UNITY_WSA_10_0
if (!(Input.touchCount > 0))
{
yield return null;
continue;
}
//Use touch position as the pixel position
int x = Mathf.FloorToInt(Input.GetTouch(0).position.x);
int y = Mathf.FloorToInt(Input.GetTouch(0).position.y);
#else
//Use mouse position as the pixel position
int x = Mathf.FloorToInt(Input.mousePosition.x);
int y = Mathf.FloorToInt(Input.mousePosition.y);
#endif
//Change this to any location from the screen you want
updateScreenPointPixelCoordinate(x, y);
//Must be 1 and 1
updateScreenPointPixelSize(1, 1);
//Take screenshot of the screen
GL.IssuePluginEvent(GetRenderEventScreenPointPixelFunc(), 1);
//Get the Color
Color32 tempColor = new Color32();
tempColor.r = screenData[0];
tempColor.g = screenData[1];
tempColor.b = screenData[2];
tempColor.a = screenData[3];
//Test it by assigning it to a raw image
rawImageColor.color = tempColor;
//Wait for a frame
yield return null;
}
}
void OnDisable()
{
//Unpin the array when disabled
pinHandler.Free();
}
}
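If you want more than a single pixel, the same plugin can be pointed at a bigger buffer and the raw RGBA bytes loaded into a Texture2D. The sketch below is only an illustration of that idea, not part of the original plugin: CaptureRegion, regionWidth, regionHeight and regionData are names I made up, and the method is assumed to live inside the ScreenPointPixel class above so it can reuse the imported plugin functions, pinHandler and rawImageColor.

//Sketch: capture a regionWidth x regionHeight block starting at the bottom-left
//of the screen and display it on the RawImage instead of a single pixel color.
IEnumerator CaptureRegion(int regionWidth, int regionHeight)
{
    //Allocate and pin a buffer big enough for the requested region (RGBA = 4 bytes per pixel)
    byte[] regionData = new byte[regionWidth * regionHeight * 4];
    GCHandle regionHandle = GCHandle.Alloc(regionData, GCHandleType.Pinned);

    //Point the plugin at the new buffer, position and size (origin is bottom-left in OpenGL)
    updateScreenPointPixelBufferPointer(regionHandle.AddrOfPinnedObject());
    updateScreenPointPixelCoordinate(0, 0);
    updateScreenPointPixelSize(regionWidth, regionHeight);

    //Run the native glReadPixels call on the render thread, then wait until the end of
    //the frame before reading the buffer (with multithreaded rendering you may need to
    //wait an extra frame)
    GL.IssuePluginEvent(GetRenderEventScreenPointPixelFunc(), 1);
    yield return new WaitForEndOfFrame();

    //Copy the raw RGBA bytes into a texture and show it on the RawImage
    Texture2D tex = new Texture2D(regionWidth, regionHeight, TextureFormat.RGBA32, false);
    tex.LoadRawTextureData(regionData);
    tex.Apply();
    rawImageColor.texture = tex;

    //Point the plugin back at the original pinned screenData buffer before unpinning
    //regionData, so the native side never holds a dangling pointer
    updateScreenPointPixelBufferPointer(pinHandler.AddrOfPinnedObject());
    updateScreenPointPixelSize(1, 1);
    regionHandle.Free();
}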