Why can't the C# client on the HoloLens receive messages from the Python server on the PC? The client can send images to the server

Problem description

This is part of the client (HoloLens) script. I use ReceiveMessage() to receive the data sent from the server, but in practice I receive nothing. SendImage(byte[] image) sends an image from the client to the server and works well. Why? Is there something wrong with ReceiveMessage()? After receiving the string data (a name sent back by Python), I want to use the result of ReceiveMessage() to assign a value to an object's text so it is displayed on the HoloLens. '''

using System;
#if UNITY_UWP
using System.IO;
using System.Threading.Tasks;
using Windows.Networking;
using Windows.Networking.Sockets;
#endif

public class TcpNetworkClientManager
{
    public static string Name = "";
#if UNITY_UWP    
    private Stream stream = null;
    private StreamWriter writer = null;
    private StreamReader reader = null;
#endif

    public TcpNetworkClientManager(string IP,int port)
    {
#if UNITY_UWP
        Task.Run(async () => {
            StreamSocket socket = new StreamSocket();            
            await socket.ConnectAsync(new HostName(IP),port.ToString());
            stream = socket.OutputStream.AsStreamForWrite();
            writer = new StreamWriter(stream);
            StreamReader reader = new StreamReader(socket.InputStream.AsStreamForRead());
            writer = null;
        });
#endif
    }

    public void SendMessage(string data)
    {
#if UNITY_UWP
        if (writer != null) Task.Run(async () =>
        {
            await writer.WriteAsync(data);
            await writer.FlushAsync();
        });
#endif
    }

    public void SendImage(byte[] image)
    {
#if UNITY_UWP
        if (stream != null) Task.Run(async () =>
        {
            await stream.WriteAsync(image, 0, image.Length);
            await stream.FlushAsync();
        });
#endif
    }

    public void ReceiveMessage()
    {
#if UNITY_UWP
        if (reader != null) Task.Run(async () =>
        {
            try
            {
                string message = await reader.ReadToEndAsync();
                Name = message;
            }
            catch (Exception) { }
        });
#endif
    }
}

''' This is the script attached to the GameObject named client. Implementing this functionality in Unity is the most important part. '''

using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.UI;
using UnityEngine.XR.WSA.WebCam;
using HoloToolkit.Unity.InputModule;
using System;
using System.Linq;

public class Client: MonoBehaviour,IInputClickHandler
{

    public string IP;
    public int port;
    public Text connectButtonText;
    public TextMesh debugText;

    private PhotoCapture photoCaptureObject = null;
    private Texture2D targetTexture = null;
    private Resolution cameraResolution;

    private TcpNetworkClientManager client = null;

    // Use this for initialization
    void Start () {
        cameraResolution = PhotoCapture.SupportedResolutions.OrderByDescending((res) => res.width * res.height).First();
        //debugText.text = cameraResolution.width.ToString() + " " + cameraResolution.height.ToString();
        targetTexture = new Texture2D(cameraResolution.width,cameraResolution.height);
        // targetTexture = new Texture2D(480,270);
        // InputManager.Instance.PushFallbackInputHandler(gameObject);
        InputManager.Instance.AddGlobalListener(gameObject);
    }
    void Update()
    {
        if (client!=null)
        {
            client.ReceiveMessage();
            debugText.text = TcpNetworkClientManager.Name;
        }
    }

    void OnStoppedPhotoMode(PhotoCapture.PhotoCaptureResult result)
    {
        photoCaptureObject.Dispose();
        photoCaptureObject = null;
    }

    void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result,PhotoCaptureFrame photoCaptureFrame)
    {
        photoCaptureFrame.UploadImageDataToTexture(targetTexture);
        //byte[] texByte = targetTexture.EncodeToJPG();
        //byte[] image = new byte[texByte.Length];
        //Array.Copy(texByte,image,texByte.Length);

        byte[] image = targetTexture.GetRawTextureData();
        client.SendImage(image);
        photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoMode);
    }

    public void OnInputClicked(InputClickedEventData eventData)
    {
        if (client != null)
        {
            PhotoCapture.CreateAsync(true,delegate (PhotoCapture captureObject)
            {
                photoCaptureObject = captureObject;
                CameraParameters cameraParameters = new CameraParameters();
                cameraParameters.hologramOpacity = 0.9f;
                cameraParameters.cameraResolutionWidth = cameraResolution.width;
                cameraParameters.cameraResolutionHeight = cameraResolution.height;
                cameraParameters.pixelFormat = CapturePixelFormat.BGRA32;
                photoCaptureObject.StartPhotoModeAsync(cameraParameters,delegate (PhotoCapture.PhotoCaptureResult result)
                {
                    photoCaptureObject.TakePhotoAsync(OnCapturedPhotoToMemory);
                });
            });
        }
    }

    public void ConnectButtonClicked()
    {
        if(client != null)
        {
            Debug.Log("Disconnected");
            connectButtonText.text = "Connect";
            client = null;
        }
        else
        {
            Debug.Log("Connected");
            client = new TcpNetworkClientManager(IP,port);
            connectButtonText.text = "Disconnect";
        }
    }
}

'''

This is part of the server (PC) script. I use clientsock.sendall(name.encode()) to send the name data from the server to the client (the Python side receives the image from the HoloLens and then processes it to obtain the name of the person in the image). '''

def main():
    host = "10.24.82.21"
    port = 8000

    width = 2048
    height = 1152

    serversock = socket.socket(socket.AF_INET,socket.SOCK_STREAM)
    serversock.setsockopt(socket.SOL_SOCKET,socket.SO_REUSEADDR,1)
    serversock.bind((host,port))
    serversock.listen(10)

    print('Waiting for connections...')
    clientsock,client_address = serversock.accept()
    
    print("Succeeded in Connection!")
    
    total = 0
    buffer_size = 4*width*height
    while True:
        data = b''
        data = clientsock.recv(buffer_size)
        print(len(data))
        if len(data) == buffer_size:
            tmp = np.frombuffer(data,np.uint8,-1)
            img = tmp.reshape(height,width,4)            
            #img = cv2.resize(img,(480,270))
            img = cv2.cvtColor(img,cv2.COLOR_RGB2BGR)
            img = cv2.flip(img,0)
            
            #cv2.imshow("img",img)
            #cv2.waitKey(1000)
            #LoadImages(data)
            name = FaceRec(img) #recognize the img and return the name
            print (name)
            clientsock.sendall(name.encode())
            print ("this is " + name)
            
            
            #cv2.imwrite("out.jpg",img)
            #cv2.waitKey(3000)
            #cv2.destroyAllWindows()
            #break
    clientsock.close()

'''

Solution

The ReceiveMessage method may be called before the asynchronous connection task has completed. In addition, the constructor assigns the StreamReader to a local variable that shadows the reader field and then sets writer back to null, so the fields checked by SendMessage and ReceiveMessage are never assigned (only stream is, which is why SendImage works). I made a few modifications to the code; you can try it in your project:

'''

public TcpNetworkClientManager(string IP,int port)
{
#if !UNITY_EDITOR
    Connect(IP,port);
#endif
}

#if !UNITY_EDITOR

public async void Connect(string IP,int port)
{
    try
    {
        StreamSocket socket = new StreamSocket();
        await socket.ConnectAsync(new HostName(IP),port.ToString());

        Stream streamOut = socket.OutputStream.AsStreamForWrite();
        writer = new StreamWriter(streamOut) { AutoFlush = true };

        Stream streamIn = socket.InputStream.AsStreamForRead();
        reader = new StreamReader(streamIn);
        ReceiveMessage();
    }
    catch (Exception e)
    {
    }
}
#endif

'''
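For the receiving side, note that ReadToEndAsync only completes once the server closes the connection, so a short reply such as a name is never returned while the socket stays open. Below is a minimal sketch of a ReceiveMessage that the Connect method above could start; it is not part of the original answer and assumes the reader and Name fields of the existing class:

'''
#if !UNITY_EDITOR
// Minimal sketch: keep reading while the connection is open and expose the
// latest text through the static Name field that the Unity Update loop reads.
public async void ReceiveMessage()
{
    char[] buffer = new char[1024];
    try
    {
        while (reader != null)
        {
            // ReadAsync returns as soon as some data is available,
            // unlike ReadToEndAsync, which waits for the stream to end.
            int count = await reader.ReadAsync(buffer, 0, buffer.Length);
            if (count <= 0) break; // server closed the connection
            Name = new string(buffer, 0, count);
        }
    }
    catch (Exception) { }
}
#endif
'''

With the receive loop started once from Connect, the Unity Update method no longer needs to call client.ReceiveMessage() every frame; it can simply read the shared field, for example:

'''
void Update()
{
    if (client != null)
    {
        // The receive loop was already started by Connect; just show the latest name.
        debugText.text = TcpNetworkClientManager.Name;
    }
}
'''

Because the server sends the raw bytes of the name with no delimiter, a single ReadAsync call may also return a partial or concatenated message; appending a newline on the Python side and using ReadLineAsync here would make the message framing explicit.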
