I'm attempting to create a remote desktop server and client in C#. The server captures the screen and sends it to the client over a socket. With the code below, only part of the JPEG image is displayed on the client. I think this is because the image is sent in multiple packets, and the client currently reads only one packet before trying to display it. Can anyone explain how I would change my code so it receives the whole image (across multiple packets) before displaying it?
Server code:
Socket serverSocket;
Socket clientSocket;

public Form1()
{
    InitializeComponent();
    backgroundWorker1.RunWorkerAsync();
}

private void backgroundWorker1_DoWork(object sender, DoWorkEventArgs e)
{
    try
    {
        serverSocket = new Socket(AddressFamily.InterNetwork,
                                  SocketType.Stream,
                                  ProtocolType.Tcp);
        IPEndPoint ipEndPoint = new IPEndPoint(IPAddress.Any, 8221);
        serverSocket.Bind(ipEndPoint);
        serverSocket.Listen(4);

        //Accept the incoming clients
        serverSocket.BeginAccept(new AsyncCallback(OnAccept), null);
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message, "Stream Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
    }
}

private void timer1_Tick(object sender, EventArgs e)
{
    timer1.Stop();

    //Capture the screen into a bitmap
    Rectangle bounds = new Rectangle(0, 0, 1280, 720);
    Bitmap bitmap = new Bitmap(bounds.Width, bounds.Height);
    using (Graphics g = Graphics.FromImage(bitmap))
    {
        g.CopyFromScreen(Point.Empty, Point.Empty, bounds.Size);
    }

    //Encode the bitmap as a JPEG (quality 40) into a memory stream
    System.IO.MemoryStream stream = new System.IO.MemoryStream();
    ImageCodecInfo myImageCodecInfo;
    System.Drawing.Imaging.Encoder myEncoder;
    EncoderParameter myEncoderParameter;
    EncoderParameters myEncoderParameters;
    myEncoderParameters = new EncoderParameters(1);
    myImageCodecInfo = GetEncoderInfo("image/jpeg");
    myEncoder = System.Drawing.Imaging.Encoder.Quality;
    myEncoderParameter = new EncoderParameter(myEncoder, 40L);
    myEncoderParameters.Param[0] = myEncoderParameter;
    bitmap.Save(stream, myImageCodecInfo, myEncoderParameters);

    //Send the raw JPEG bytes to the connected client
    byte[] imageBytes = stream.ToArray();
    stream.Dispose();
    clientSocket.Send(imageBytes);

    timer1.Start();
}
As you can see, I'm using a timer with its interval set to 30 (milliseconds) to send the image bytes.
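One idea I've been considering is to prefix every JPEG frame with its length before sending it, so the client knows exactly how many bytes belong to one image. A rough sketch of what I mean on the server side (SendFrame is a hypothetical helper I made up, assuming a 4-byte length header written with BitConverter; it is not in my current code):

private void SendFrame(byte[] imageBytes)
{
    //Prefix each frame with its length so the receiver knows where it ends.
    //BitConverter uses the machine's native byte order, so this assumes the
    //server and client run on machines with the same endianness.
    byte[] lengthPrefix = BitConverter.GetBytes(imageBytes.Length);
    clientSocket.Send(lengthPrefix);
    clientSocket.Send(imageBytes);
}

In timer1_Tick I would then call SendFrame(imageBytes) instead of clientSocket.Send(imageBytes).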
Client code:
public Socket clientSocket;
byte[] byteData = new byte[2048];
MemoryStream ms;

public Form1()
{
    InitializeComponent();
    backgroundWorker1.RunWorkerAsync();
    this.DoubleBuffered = true;
}

private void backgroundWorker1_DoWork(object sender, DoWorkEventArgs e)
{
    try
    {
        clientSocket = new Socket(AddressFamily.InterNetwork,
                                  SocketType.Stream, ProtocolType.Tcp);
        IPEndPoint ipEndPoint = new IPEndPoint(IPAddress.Parse("MY EXTERNAL IP HERE"), 8221);

        //Connect to the server
        clientSocket.BeginConnect(ipEndPoint,
            new AsyncCallback(OnConnect), null);
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message, "SGSclient",
            MessageBoxButtons.OK,
            MessageBoxIcon.Error);
    }
}

private void OnConnect(IAsyncResult ar)
{
    try
    {
        //Start listening to the data asynchronously
        clientSocket.BeginReceive(byteData,
            0,
            byteData.Length,
            SocketFlags.None,
            new AsyncCallback(OnReceive),
            null);
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message, "Stream Error",
            MessageBoxButtons.OK, MessageBoxIcon.Error);
    }
}

private void OnReceive(IAsyncResult ar)
{
    try
    {
        int byteCount = clientSocket.EndReceive(ar);

        //Try to decode whatever is in the buffer and show it as the background
        ms = new MemoryStream(byteData);
        using (BinaryReader br = new BinaryReader(ms))
        {
            this.BackgroundImage = Image.FromStream(ms).GetThumbnailImage(this.ClientRectangle.Width, this.ClientRectangle.Height, null, IntPtr.Zero);
        }
    }
    catch (ArgumentException e)
    {
        //MessageBox.Show(e.Message);
    }

    //Queue up the next receive
    clientSocket.BeginReceive(byteData, 0, byteData.Length, SocketFlags.None, new AsyncCallback(OnReceive), null);
}
The client is meant to receive the image and then display it as the form's background.
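Following the same length-prefix idea, I assume the client would need to keep calling Receive until a full frame has arrived before decoding it. A sketch of what I had in mind, written synchronously for simplicity (ReceiveExact and ReceiveFrame are hypothetical helpers, assuming the server sends a 4-byte length header before each JPEG frame):

private byte[] ReceiveExact(int count)
{
    byte[] buffer = new byte[count];
    int offset = 0;
    while (offset < count)
    {
        //Receive can return fewer bytes than requested, so keep reading
        //until the requested number of bytes has arrived.
        int read = clientSocket.Receive(buffer, offset, count - offset, SocketFlags.None);
        if (read == 0)
            throw new SocketException(); //connection closed by the server
        offset += read;
    }
    return buffer;
}

private Image ReceiveFrame()
{
    //Read the 4-byte length header, then exactly that many JPEG bytes.
    byte[] lengthPrefix = ReceiveExact(4);
    int frameLength = BitConverter.ToInt32(lengthPrefix, 0);
    byte[] imageBytes = ReceiveExact(frameLength);

    //The stream backing Image.FromStream must stay alive while the image
    //is in use, so it is intentionally not disposed here.
    return Image.FromStream(new MemoryStream(imageBytes));
}

If this is the right direction, I could call ReceiveFrame in a loop on the background worker and assign the result to this.BackgroundImage (via Invoke, since it runs off the UI thread), instead of using BeginReceive with a fixed 2048-byte buffer.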