Commit 6386276e authored by BERTON Florian Gaetan Hubert's avatar BERTON Florian Gaetan Hubert
Browse files

Merge remote-tracking branch 'origin/dev' into 7_command_line

parents c4a0ea26 0fb21edf
fileFormatVersion: 2
guid: 25eb80f5b2be01542bc0b413fcf1e3b1
folderAsset: yes
DefaultImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:
using UnityEngine;
// Helpers that encode object instance ids, tags and layers as unique colors,
// used to render segmentation images with a replacement shader.
public class ColorEncoding
{
    // Reverses the bit order of a byte (MSB <-> LSB) via the classic
    // 64-bit multiply / mask / modulo trick.
    public static byte ReverseBits(byte value)
    {
        return (byte)((value * 0x0202020202 & 0x010884422010) % 1023);
    }

    // Spreads the 8 bits of 'value' apart so that consecutive source bits land
    // 'sparse' positions apart in the result (bit i ends up at (7 - i) * sparse).
    // With sparse == 3 this lets three bytes be interleaved into 24 bits.
    public static int SparsifyBits(byte value, int sparse)
    {
        int spread = 0;
        int bitsLeft = value;
        for (int n = 8; n > 0; n--)
        {
            // fold in the next low bit, then make room for the following one
            spread = (spread | (bitsLeft & 1)) << sparse;
            bitsLeft >>= 1;
        }
        // undo the final shift so the last bit sits at position 0
        return spread >> sparse;
    }

    // Maps an instance id to a visually distinct opaque color by interleaving
    // the id's three low bytes across the R/G/B channels.
    public static Color EncodeIDAsColor(int instanceId)
    {
        // Fold the sign into the low bit so +n and -n stay distinct.
        int code = instanceId * 2;
        if (code < 0)
            code = 1 - code;
        int interleaved =
            (SparsifyBits((byte)(code >> 16), 3) << 2) |
            (SparsifyBits((byte)(code >> 8), 3) << 1) |
            SparsifyBits((byte)code, 3);
        return new Color32((byte)(interleaved >> 8),
                           (byte)(interleaved >> 16),
                           (byte)interleaved,
                           255);
    }

    // Packs the tag's hash code into RGBA (high byte becomes alpha).
    public static Color EncodeTagAsColor(string tag)
    {
        int hash = tag.GetHashCode();
        return new Color32((byte)(hash >> 16),
                           (byte)(hash >> 8),
                           (byte)hash,
                           (byte)(hash >> 24));
    }

    // Maps a layer index to a color from a fixed 16-entry palette; layers past
    // the palette reuse an entry at reduced brightness.
    public static Color EncodeLayerAsColor(int layer)
    {
        // Must stay in (0.5 .. 1.0) so the brightness divider below never
        // makes two palette entries collide.
        const float z = .7f;
        // First 8 layers are Unity builtin layers; Unity supports up to 32
        // layers total, so 16 base colors plus dimming covers them all.
        Color[] palette = {
            new Color(1,1,1,1), new Color(z,z,z,1),                         // 0
            new Color(1,1,z,1), new Color(1,z,1,1), new Color(z,1,1,1),    //
            new Color(1,z,0,1), new Color(z,0,1,1), new Color(0,1,z,1),    // 7
            new Color(1,0,0,1), new Color(0,1,0,1), new Color(0,0,1,1),    // 8
            new Color(1,1,0,1), new Color(1,0,1,1), new Color(0,1,1,1),    //
            new Color(1,z,z,1), new Color(z,1,z,1)                          // 15
        };
        Color baseColor = palette[layer % palette.Length];
        // integer division: every 16 layers the whole palette dims one step
        float divider = 1.0f + Mathf.Floor(layer / palette.Length);
        return baseColor / divider;
    }
}
fileFormatVersion: 2
guid: 39d95691507b4fd4eafba7727fa50f9e
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:
using UnityEngine;
using UnityEngine.Rendering;
using System.Collections;
using System.IO;
// @TODO:
// . support custom color wheels in optical flow via lookup textures
// . support custom depth encoding
// . support multiple overlay cameras
// . tests
// . better example scene(s)
// @KNOWN ISSUES
// . Motion Vectors can produce incorrect results in Unity 5.5.f3:
// 1) during the first rendering frame
// 2) when rendering several cameras with different aspect ratios - vectors stretch to the sides of the screen
// Renders analysis passes (id/category segmentation, depth, normals, optical
// flow) through hidden helper cameras that mirror the main camera, and can
// save any enabled pass to a PNG file via Save().
[RequireComponent(typeof(Camera))]
public class FiltersControl : MonoBehaviour
{
[Header("Shader Setup")]
// Replacement shader for the segmentation/depth/normals passes;
// Start() falls back to "Hidden/LearningFilters" when unassigned.
public Shader learningFiltersShader;
// Post-process shader for the optical-flow pass;
// Start() falls back to "Hidden/OpticalFlow" when unassigned.
public Shader opticalFlowShader;
// Forwarded to the optical-flow material's "_Sensitivity" property.
public float opticalFlowSensitivity = 1.0f;
[Header("Save Image Capture")]
// Per-pass save toggles checked in Save(); a pass is written only when its
// matching flag is true.
public bool saveImage = false;
public bool saveIdSegmentation = false;
public bool saveLayerSegmentation = false;
public bool saveDepth = false;
public bool saveNormals = false;
public bool saveOpticalFlow = false;
// NOTE(review): 'filepath' and 'filename' are never read inside this class;
// presumably consumed by another script via the public fields — confirm
// before removing.
public string filepath = "..\\Captures";
public string filename = "test.png";
// Index into capturePasses of the pass currently routed to display 0.
private int currentCam = 0;
// pass configuration
// NOTE(review): these object initializers use the parameterless struct
// constructor, so supportsAntialiasing defaults to FALSE for every pass and
// the explicit 'supportsAntialiasing = false' entries are redundant. The
// CapturePass(string) constructor below suggests the intended default was
// true — confirm whether "Images"/"Depth"/"Normals" should have AA enabled.
private CapturePass[] capturePasses = new CapturePass[] {
new CapturePass() { name = "Images" },
new CapturePass() { name = "Segmentation", supportsAntialiasing = false },
new CapturePass() { name = "Categories", supportsAntialiasing = false },
new CapturePass() { name = "Depth" },
new CapturePass() { name = "Normals" },
new CapturePass() { name = "OpticalFlow", supportsAntialiasing = false, needsRescale = true } // (see issue with Motion Vectors in @KNOWN ISSUES)
};
// One render pass: capture settings plus the camera that renders it.
struct CapturePass
{
// configuration
public string name;
public bool supportsAntialiasing;
public bool needsRescale;
public CapturePass(string name_) { name = name_; supportsAntialiasing = true; needsRescale = false; camera = null; }
// impl
public Camera camera;
};
// cached materials
private Material opticalFlowMaterial;
// Resolves shader fallbacks, creates one hidden camera per pass (the first
// pass reuses the real camera), then performs the initial setup.
void Start()
{
// default fallbacks, if shaders are unspecified
if (!learningFiltersShader)
learningFiltersShader = Shader.Find("Hidden/LearningFilters");
if (!opticalFlowShader)
opticalFlowShader = Shader.Find("Hidden/OpticalFlow");
// use real camera to capture final image
capturePasses[0].camera = GetComponent<Camera>();
for (int q = 1; q < capturePasses.Length; q++)
capturePasses[q].camera = CreateHiddenCamera(capturePasses[q].name);
OnCameraChange();
OnSceneChange();
}
// Re-syncs pass cameras every frame; scene-change detection only runs in
// the editor (no runtime callback exists for it).
void LateUpdate()
{
#if UNITY_EDITOR
if (DetectPotentialSceneChangeInEditor())
OnSceneChange();
#endif // UNITY_EDITOR
// @TODO: detect if camera properties actually changed
OnCameraChange();
}
// Creates a child camera that is hidden from the hierarchy and not saved
// with the scene.
private Camera CreateHiddenCamera(string name)
{
var go = new GameObject(name, typeof(Camera));
go.hideFlags = HideFlags.HideAndDontSave;
go.transform.parent = transform;
var newCamera = go.GetComponent<Camera>();
return newCamera;
}
// Routes pass 0 ("Images") back to display 0 and demotes the currently
// shown pass to display 1.
public void cycleReset()
{
int newCam = 0;
Debug.Log(currentCam + " => " + newCam);
//capturePasses[currentCam].camera.enabled = false;
//capturePasses[newCam].camera.enabled = true;
capturePasses[currentCam].camera.targetDisplay = 1;
capturePasses[newCam].camera.targetDisplay = 0;
currentCam = newCam;
}
// Shows the next pass (wrapping around) on display 0.
public void cycleForward()
{
int newCam = (currentCam + 1) % capturePasses.Length;
Debug.Log(currentCam + " => " + newCam);
//capturePasses[currentCam].camera.enabled = false;
//capturePasses[newCam].camera.enabled = true;
capturePasses[currentCam].camera.targetDisplay = 1;
capturePasses[newCam].camera.targetDisplay= 0;
currentCam = newCam;
}
// Shows the previous pass (wrapping around) on display 0.
public void cycleBackward()
{
int newCam = currentCam - 1;
if (newCam < 0)
newCam = capturePasses.Length - 1;
Debug.Log(currentCam + " => " + newCam);
//capturePasses[currentCam].camera.enabled = false;
//capturePasses[newCam].camera.enabled = true;
capturePasses[currentCam].camera.targetDisplay = 1;
capturePasses[newCam].camera.targetDisplay = 0;
currentCam = newCam;
}
// Convenience overload: replacement-shader pass with a black clear color.
static private void SetupCameraWithReplacementShader(Camera cam, Shader shader, ReplacementMode mode)
{
SetupCameraWithReplacementShader(cam, shader, mode, Color.black);
}
// Configures 'cam' to render the whole scene with 'shader', passing the
// desired output mode to it through the global "_OutputMode" float.
// The same command buffer is attached to two events so the global is set
// for both the opaque and the final pass.
static private void SetupCameraWithReplacementShader(Camera cam, Shader shader, ReplacementMode mode, Color clearColor)
{
var cb = new CommandBuffer();
cb.SetGlobalFloat("_OutputMode", (int)mode); // @TODO: CommandBuffer is missing SetGlobalInt() method
cam.AddCommandBuffer(CameraEvent.BeforeForwardOpaque, cb);
cam.AddCommandBuffer(CameraEvent.BeforeFinalPass, cb);
cam.SetReplacementShader(shader, "");
cam.backgroundColor = clearColor;
cam.clearFlags = CameraClearFlags.SolidColor;
cam.allowHDR = false;
cam.allowMSAA = false;
}
// Configures 'cam' to run 'material' as a full-screen blit after rendering
// (used for optical flow, which needs depth + motion vectors).
static private void SetupCameraWithPostShader(Camera cam, Material material, DepthTextureMode depthTextureMode = DepthTextureMode.None)
{
var cb = new CommandBuffer();
cb.Blit(null, BuiltinRenderTextureType.CurrentActive, material);
cam.AddCommandBuffer(CameraEvent.AfterEverything, cb);
cam.depthTextureMode = depthTextureMode;
}
// Output modes understood by the LearningFilters shader's _OutputMode.
// Values must stay in sync with the shader's Output() function.
// NOTE: "CatergoryId" misspelling is kept — it is a public identifier.
public enum ReplacementMode
{
ObjectId = 0,
CatergoryId = 1,
DepthCompressed = 2,
DepthMultichannel = 3,
Normals = 4
};
// Re-syncs every capture camera with the main camera and re-installs each
// pass's command buffers and replacement shaders. Called every LateUpdate.
public void OnCameraChange()
{
int targetDisplay = 1;
var mainCamera = GetComponent<Camera>();
foreach (var pass in capturePasses)
{
if (pass.camera == mainCamera)
continue;
// cleanup capturing camera
pass.camera.RemoveAllCommandBuffers();
// copy all "main" camera parameters into capturing camera
pass.camera.CopyFrom(mainCamera);
// set targetDisplay here since it gets overridden by CopyFrom()
if (targetDisplay== currentCam)
pass.camera.targetDisplay = 0;
else
pass.camera.targetDisplay = targetDisplay+1;
targetDisplay++;
}
// cache materials and setup material properties
if (!opticalFlowMaterial || opticalFlowMaterial.shader != opticalFlowShader)
opticalFlowMaterial = new Material(opticalFlowShader);
opticalFlowMaterial.SetFloat("_Sensitivity", opticalFlowSensitivity);
// setup command buffers and replacement shaders
// (pass indices match the capturePasses array declared above)
SetupCameraWithReplacementShader(capturePasses[1].camera, learningFiltersShader, ReplacementMode.ObjectId);
SetupCameraWithReplacementShader(capturePasses[2].camera, learningFiltersShader, ReplacementMode.CatergoryId);
SetupCameraWithReplacementShader(capturePasses[3].camera, learningFiltersShader, ReplacementMode.DepthCompressed, Color.white);
SetupCameraWithReplacementShader(capturePasses[4].camera, learningFiltersShader, ReplacementMode.Normals);
SetupCameraWithPostShader(capturePasses[5].camera, opticalFlowMaterial, DepthTextureMode.Depth | DepthTextureMode.MotionVectors);
}
// Re-tags every renderer in the scene with per-object and per-layer colors
// read by the segmentation shaders via a MaterialPropertyBlock.
public void OnSceneChange()
{
var renderers = Object.FindObjectsOfType<Renderer>();
var mpb = new MaterialPropertyBlock();
foreach (var r in renderers)
{
var id = r.gameObject.GetInstanceID();
var layer = r.gameObject.layer;
// NOTE(review): 'tag' is assigned but never used below (no
// EncodeTagAsColor call) — confirm whether a _TagColor was intended.
var tag = r.gameObject.tag;
mpb.SetColor("_ObjectColor", ColorEncoding.EncodeIDAsColor(id));
mpb.SetColor("_CategoryColor", ColorEncoding.EncodeLayerAsColor(layer));
r.SetPropertyBlock(mpb);
}
}
// Public entry point: saves all enabled passes under 'path'/'filename'.
// Defaults to the screen resolution when width/height are not positive.
public void Save(string filename, int width = -1, int height = -1, string path = "")
{
if (width <= 0 || height <= 0)
{
width = Screen.width;
height = Screen.height;
}
var filenameExtension = System.IO.Path.GetExtension(filename);
if (filenameExtension == "")
filenameExtension = ".png";
var filenameWithoutExtension = Path.GetFileNameWithoutExtension(filename);
// NOTE(review): 'pathWithoutExtension' is only used by the commented-out
// coroutine below; the save currently runs synchronously instead of
// waiting for EndOfFrame as the comment suggests.
var pathWithoutExtension = Path.Combine(path, filenameWithoutExtension);
// execute as coroutine to wait for the EndOfFrame before starting capture
Save(filenameWithoutExtension, filenameExtension, width, height, path);
//StartCoroutine(
// WaitForEndOfFrameAndSave(pathWithoutExtension, filenameExtension, width, height, path));
}
//private IEnumerator WaitForEndOfFrameAndSave(string filenameWithoutExtension, string filenameExtension, int width, int height, string path)
//{
// yield return new WaitForEndOfFrame();
// Save(filenameWithoutExtension, filenameExtension, width, height, path);
//}
// Saves each pass whose toggle is enabled into its own subdirectory
// (path/<passName>/<filename>), creating directories as needed.
private void Save(string filenameWithoutExtension, string filenameExtension, int width, int height, string path)
{
foreach (var pass in capturePasses)
{
// Perform a check to make sure that the capture pass should be saved
if (
(pass.name == "Images" && saveImage)
|| (pass.name == "Segmentation" && saveIdSegmentation)
|| (pass.name == "Categories" && saveLayerSegmentation)
|| (pass.name == "Depth" && saveDepth)
|| (pass.name == "Normals" && saveNormals)
|| (pass.name == "OpticalFlow" && saveOpticalFlow)
)
{
string subPath = Path.Combine(path, pass.name);
if (!System.IO.Directory.Exists(subPath))
System.IO.Directory.CreateDirectory(subPath);
Save(pass.camera, Path.Combine(subPath, filenameWithoutExtension+ filenameExtension), width, height, pass.supportsAntialiasing, pass.needsRescale);
}
}
}
// Renders 'cam' into a temporary RenderTexture, reads the pixels back and
// writes them to 'filename' as a PNG, restoring all GPU state afterwards.
// needsRescale renders at the main camera's resolution first and blits to
// the target size (workaround for the Motion Vectors issue noted above).
private void Save(Camera cam, string filename, int width, int height, bool supportsAntialiasing, bool needsRescale)
{
var mainCamera = GetComponent<Camera>();
var depth = 24;
var format = RenderTextureFormat.Default;
var readWrite = RenderTextureReadWrite.Default;
var antiAliasing = (supportsAntialiasing) ? Mathf.Max(1, QualitySettings.antiAliasing) : 1;
var finalRT =
RenderTexture.GetTemporary(width, height, depth, format, readWrite, antiAliasing);
var renderRT = (!needsRescale) ? finalRT :
RenderTexture.GetTemporary(mainCamera.pixelWidth, mainCamera.pixelHeight, depth, format, readWrite, antiAliasing);
var tex = new Texture2D(width, height, TextureFormat.RGB24, false);
var prevActiveRT = RenderTexture.active;
var prevCameraRT = cam.targetTexture;
// render to offscreen texture (readonly from CPU side)
RenderTexture.active = renderRT;
cam.targetTexture = renderRT;
cam.Render();
if (needsRescale)
{
// blit to rescale (see issue with Motion Vectors in @KNOWN ISSUES)
RenderTexture.active = finalRT;
Graphics.Blit(renderRT, finalRT);
RenderTexture.ReleaseTemporary(renderRT);
}
// read offscreen texture contents into the CPU readable texture
tex.ReadPixels(new Rect(0, 0, tex.width, tex.height), 0, 0);
tex.Apply();
// encode texture into PNG
var bytes = tex.EncodeToPNG();
File.WriteAllBytes(filename, bytes);
// restore state and cleanup
cam.targetTexture = prevCameraRT;
RenderTexture.active = prevActiveRT;
Object.Destroy(tex);
RenderTexture.ReleaseTemporary(finalRT);
}
#if UNITY_EDITOR
// Last selection snapshot used to detect layer/tag edits between frames.
private GameObject lastSelectedGO;
private int lastSelectedGOLayer = -1;
private string lastSelectedGOTag = "unknown";
// Heuristic change detection: returns true when the edited selection may
// have changed a property (layer/tag) that affects the segmentation colors.
private bool DetectPotentialSceneChangeInEditor()
{
bool change = false;
// there is no callback in Unity Editor to automatically detect changes in scene objects
// as a workaround lets track selected objects and check, if properties that are
// interesting for us (layer or tag) did not change since the last frame
if (UnityEditor.Selection.transforms.Length > 1)
{
// multiple objects are selected, all bets are off!
// we have to assume these objects are being edited
change = true;
lastSelectedGO = null;
}
else if (UnityEditor.Selection.activeGameObject)
{
var go = UnityEditor.Selection.activeGameObject;
// check if layer or tag of a selected object have changed since the last frame
var potentialChangeHappened = lastSelectedGOLayer != go.layer || lastSelectedGOTag != go.tag;
if (go == lastSelectedGO && potentialChangeHappened)
change = true;
lastSelectedGO = go;
lastSelectedGOLayer = go.layer;
lastSelectedGOTag = go.tag;
}
return change;
}
#endif // UNITY_EDITOR
}
fileFormatVersion: 2
guid: c72025d3fcfab234ea43bd743d6e8d8f
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:
Shader "Custom/LearningFilters"
{
Properties{
_MainTex("", 2D) = "white" {}
_Cutoff("", Float) = 0.5
_Color("", Color) = (1,1,1,1)
_ObjectColor("Object Color", Color) = (1,1,1,1)
_CategoryColor("Catergory Color", Color) = (0,1,0,1)
}
SubShader{
CGINCLUDE
fixed4 _ObjectColor;
fixed4 _CategoryColor;
int _OutputMode;
// remap depth: [0 @ eye .. 1 @ far] => [0 @ near .. 1 @ far]
// NOTE(review): the exact remap would be (d - n/f) / (1 - n/f); this uses the
// first-order approximation (d - n/f) * (1 + n/f), which is accurate only
// when near << far — confirm that is acceptable for the depth output.
inline float Linear01FromEyeToLinear01FromNear(float depth01)
{
float near = _ProjectionParams.y;
float far = _ProjectionParams.z;
return (depth01 - near / far) * (1 + near / far);
}
// Selects the fragment color according to the global _OutputMode set from C#.
// depth01: linear eye-space depth remapped to [0..1]; normal: view-space normal.
float4 Output(float depth01, float3 normal)
{
/* mode values must match the ReplacementMode enum in FiltersControl.cs:
enum ReplacementMode {
ObjectId = 0,
CatergoryId = 1,
DepthCompressed = 2,
DepthMultichannel = 3,
Normals = 4
};*/
if (_OutputMode == 0) // ObjectId
{
// per-object color set via MaterialPropertyBlock in OnSceneChange()
return _ObjectColor;
}
else if (_OutputMode == 1) // CatergoryId
{
// per-layer color set via MaterialPropertyBlock in OnSceneChange()
return _CategoryColor;
}
else if (_OutputMode == 2) // DepthCompressed
{
float linearZFromNear = Linear01FromEyeToLinear01FromNear(depth01);
float k = 0.25; // compression factor
// pow < 1 expands near-range contrast; scalar splats to all channels
return pow(linearZFromNear, k);
}
else if (_OutputMode == 3) // DepthMultichannel
{
// split depth into low/high precision channels plus the raw value
float lowBits = frac(depth01 * 256);
float highBits = depth01 - lowBits / 256;
return float4(lowBits, highBits, depth01, 1);
}
else if (_OutputMode == 4) // Normals
{
// [-1 .. 1] => [0 .. 1]
float3 c = normal * 0.5 + 0.5;
return float4(c, 1);
}
// unsupported _OutputMode: salmon pink to make the error visible
return float4(1, 0.5, 0.5, 1);
}
ENDCG
// Support for different RenderTypes
// The following code is based on builtin Internal-DepthNormalsTexture.shader
Tags { "RenderType" = "Opaque" }
Pass {
CGPROGRAM
#pragma vertex vert
#pragma fragment frag
#include "UnityCG.cginc"
struct v2f {
float4 pos : SV_POSITION;
float4 nz : TEXCOORD0;
UNITY_VERTEX_OUTPUT_STEREO
};
v2f vert(appdata_base v) {
v2f o;
UNITY_SETUP_INSTANCE_ID(v);
UNITY_INITIALIZE_VERTEX_OUTPUT_STEREO(o);
o.pos = UnityObjectToClipPos(v.vertex);
o.nz.xyz = COMPUTE_VIEW_NORMAL;
o.nz.w = COMPUTE_DEPTH_01;
return o;
}
fixed4 frag(v2f i) : SV_Target {
return Output(i.nz.w, i.nz.xyz);
}
ENDCG
}
}
SubShader{
Tags { "RenderType" = "TransparentCutout" }
Pass {
CGPROGRAM
#pragma vertex vert
#pragma fragment frag
#include "UnityCG.cginc"
struct v2f {
float4 pos : SV_POSITION;
float2 uv : TEXCOORD0;
float4 nz : TEXCOORD1;
UNITY_VERTEX_OUTPUT_STEREO
};
uniform float4 _MainTex_ST;
v2f vert(appdata_base v) {
v2f o;