问题描述
我正在一个需要使用GStreamer流式传输视频的项目,然后需要在Unity中显示视频。
我正在尝试一种方法:
- 我使用以下GStreamer命令将网络摄像头视图放到tcp上
gst-launch-1.0 v4l2src ! videoconvert ! video/x-raw,width=640,height=480,framerate=20/1 ! jpegenc ! multipartmux ! tcpserversink host=127.0.0.1 port=5000
可以通过以下方式由GStreamer查看:
gst-launch-1.0 tcpclientsrc port=5000 host=127.0.0.1 ! multipartdemux ! jpegdec ! autovideosink
- 在Unity中,我使用以下脚本获取流字节并对其进行解码
// Receives length-prefixed JPEG frames over TCP on a background thread and
// decodes them into a Texture2D on the Unity main thread.
// Wire format expected from the sender: a fixed-size header (messageByteLength
// bytes, first 4 = little-endian int32 frame length) followed by that many
// bytes of JPEG data. NOTE(review): GStreamer's multipartmux does NOT emit
// this format — the sender must write the length header itself.
public class _TextureReceiver : MonoBehaviour {
    public int port = 5000;
    public string IP = "127.0.0.1";

    TcpClient client;

    [HideInInspector]
    public Texture2D texture;

    // volatile: written by the main thread (OnApplicationQuit), read by the
    // background receive thread.
    private volatile bool stop = false;

    [Header("Must be the same in sender and receiver")]
    public int messageByteLength = 24;

    // Connect on a background thread so Start() never blocks the main thread.
    void Start() {
        Application.runInBackground = true;
        client = new TcpClient();
        Loom.RunAsync(() => {
            client.Connect(IPAddress.Parse(IP), port);
            imageReceiver();
        });
    }

    // Receive loop: alternately read a length header, then a frame of that size.
    void imageReceiver() {
        Loom.RunAsync(() => {
            while (!stop) {
                int imageSize = readImageByteSize(messageByteLength);
                // BUG FIX: the original fed -1 (disconnect marker) straight into
                // readFrameByteArray, which then threw on `new byte[-1]`.
                if (imageSize < 0) {
                    break;
                }
                readFrameByteArray(imageSize);
            }
        });
    }

    // Blocks until exactly `count` bytes have been read into `buffer`.
    // Returns false if the peer closed the connection first (Read() == 0).
    private bool readExactly(NetworkStream stream, byte[] buffer, int count) {
        int total = 0;
        while (total < count) {
            int read = stream.Read(buffer, total, count - total);
            if (read == 0) {
                return false;
            }
            total += read;
        }
        return true;
    }

    // Decodes the length header: a little-endian 32-bit int in the first 4 bytes.
    // BUG FIX: the original summed only bytes 0 and 1, silently capping frame
    // sizes at 65535 bytes.
    int frameByteArrayToByteLength(byte[] frameBytesLength) {
        // BitConverter matches the sender as long as both ends are little-endian,
        // which covers every platform Unity currently ships on.
        return BitConverter.ToInt32(frameBytesLength, 0);
    }

    // Reads the fixed-size header and returns the encoded frame length,
    // or -1 if the connection closed mid-header.
    private int readImageByteSize(int size) {
        NetworkStream serverStream = client.GetStream();
        byte[] header = new byte[size];
        if (!readExactly(serverStream, header, size)) {
            UnityEngine.Debug.Log("disconnected");
            return -1;
        }
        return frameByteArrayToByteLength(header);
    }

    // Reads one complete frame of `size` bytes and hands it to the main thread.
    private void readFrameByteArray(int size) {
        NetworkStream serverStream = client.GetStream();
        byte[] imageBytes = new byte[size];
        // BUG FIX: the original issued a single Read() with the offset argument
        // missing and its retry loop commented out, so it usually captured only
        // a partial frame and then mis-sized the copy into a second array.
        if (!readExactly(serverStream, imageBytes, size)) {
            UnityEngine.Debug.Log("disconnected");
            return; // connection lost mid-frame; nothing to display
        }

        bool readyToReadAgain = false;
        // Texture2D may only be touched on the main Unity thread.
        Loom.QueueOnMainThread(() => {
            loadReceivedImage(imageBytes);
            readyToReadAgain = true;
        });
        // Throttle the receive thread until the frame has been displayed.
        // BUG FIX: also honor `stop`, so shutdown can't leave this spinning.
        while (!readyToReadAgain && !stop) {
            System.Threading.Thread.Sleep(1);
        }
    }

    // Main-thread only: decode the JPEG bytes into the target texture.
    void loadReceivedImage(byte[] receivedImageBytes) {
        if (texture) texture.LoadImage(receivedImageBytes);
    }

    public void SetTargetTexture(Texture2D t) {
        texture = t;
    }

    void OnApplicationQuit() {
        stop = true;
        if (client != null) {
            client.Close();
        }
    }
}
但是,它不起作用。
我认为是因为发送端的编码格式和接收端的解码逻辑不匹配,但是我不知道该如何解析GStreamer发出的字节序列。
在int imageSize = readImageByteSize(messageByteLength);
,
imageSize应该使用一个看起来像这样的传入字节来计算:
frameBytesLength [] {196,105,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,}
但是实际收到的却是: frameBytesLength [] {255,216,255,224,0,16,74,70,73,70,0,1,1,1,0,0,1,0,1,0,255,219,0,67,} ——这正是JPEG文件头(0xFF 0xD8 0xFF 0xE0,后面紧跟"JFIF"),说明流里根本没有长度前缀:multipartmux 输出的是 MIME 分段边界加原始 JPEG 数据,而不是二进制长度头。把这些字节当成长度来解析,会得到一个过大(甚至溢出为负)的值,因此解码过程无法继续。到目前为止,这就是我所做的尝试。
以下是Unity代码的参考: https://github.com/BarakChamo/uTextureSendReceive
P.S. 我正在使用Linux,所以不能使用Mray GStreamer Unity插件,因为它不支持Linux。谢谢您的时间。
解决方法
我以一种不太优雅的方式解决了这个问题。 我只是给接收到的每个帧分配一个固定的最大字节大小。 这不是最好的方法,但这对我有用。