
Trying to create an async TCP server. Previously I could accomplish this with the Begin/End pattern pretty well and handle multiple clients like this:

class Server
{
    IPEndPoint ipep;
    TcpListener listener;
    bool Running;
    List<Client> clients;

    public Server(string host)
    {
        IPAddress ip = IPAddress.Parse(host);
        ipep = new(ip, 21);
        Running = false;
        clients = new();
    }

    public void Start()
    {
        listener = new(ipep);
        listener.Start();
        Running = true;
        while (Running)
            listener.BeginAcceptTcpClient(Accept, null);
    }

    public void Accept(IAsyncResult ar)
    {
        Client client = new(listener.EndAcceptTcpClient(ar));
        clients.Add(client);
        client.WaitCommands();
    }
}

class Client
{
    TcpClient client;
    NetworkStream stream;

    public Client(TcpClient client)
    {
        this.client = client;
        stream = client.GetStream();
    }

    public void WaitCommands()
    {
        stream.BeginRead(/*some buffer stuff*/, Receive, null);
    }

    public void Receive(IAsyncResult ar)
    {
        stream.EndRead(ar);
        stream.BeginRead(/*again buffer stuff*/, Receive, null);
    }
}

and there are a lot of examples of this on the Internet. But MSDN seems to recommend using async methods instead, so I want to convert to them. Here's what I have:

class Server
{
    IPEndPoint ipep;
    TcpListener listener;
    bool Running;
    List<Client> clients;

    public Server(string host)
    {
        IPAddress ip = IPAddress.Parse(host);
        ipep = new(ip, 21);
        Running = false;
        clients = new();
    }

    public async Task Start()
    {
        listener = new(ipep);
        listener.Start();
        Running = true;
        while (Running)
        {
            Client c = new(await listener.AcceptTcpClientAsync());
            clients.Add(c);
            await c.WaitCommands();
        }
    }
}

class Client
{
    TcpClient client;
    NetworkStream stream;

    public Client(TcpClient client)
    {
        this.client = client;
        stream = client.GetStream();
    }

    public async Task WaitCommands()
    {
        while (true)
        {
            await stream.ReadAsync(/*buffer stuff*/);
        }
    }
}

and obviously `await c.WaitCommands();` blocks other clients, since the app is stuck in the `while (true)` loop and never reaches the `await` on `AcceptTcpClientAsync` again. I found that `_ = Task.Run(async () => await client.WaitCommands());` does the trick. But as I understand it, that takes threads from the thread pool, unlike the Begin/End approach (or am I wrong? that's part of the question too).
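
For reference, the workaround I mean looks roughly like this (just a sketch of what I described, reusing the classes above):

// inside Server.Start(), instead of awaiting WaitCommands directly
while (Running)
{
    Client c = new(await listener.AcceptTcpClientAsync());
    clients.Add(c);
    // fire-and-forget: the accept loop keeps going while this client is served
    _ = Task.Run(async () => await c.WaitCommands());
}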

So the questions are

  1. How do I start reading from a client and keep accepting new ones in the second example?
  2. Am I going the right way? Or what approach scales best to a large number of users (i.e. should I maybe use a thread per client instead)?
SelfishCrawler
  • In the accept loop you shouldn't wait for the client to complete. Using one thread for the accept loop and a new thread for each client is a simple model, and scales well up to several hundred clients. – David Browne - Microsoft Jan 01 '22 at 19:06
  • @DavidBrowne-Microsoft yeah, that's what I understand, but how do I use the async approach? The first example with callbacks works very well, but I can't figure out how to do the same with async/await, or whether I should even do it the way MSDN suggests – SelfishCrawler Jan 01 '22 at 19:13

1 Answer


Something like this:

using System.Net;
using System.Net.Sockets;

var svr = new Server("127.0.0.1");
await svr.Run();
Console.WriteLine("Goodby, World!");

class Server
{
    IPEndPoint ipep;
    TcpListener listener;
    bool Running;
    List<Client> clients;
    private CancellationTokenSource cts;

    public Server(string host)
    {
        IPAddress ip = IPAddress.Parse(host);
        ipep = new(ip, 1121);
        Running = false;
        clients = new();

        this.cts = new CancellationTokenSource();
    }

    public void Stop()
    {
        Running = false;
        cts.Cancel();
    }
    public async Task Run()
    {
        listener = new(ipep);
        listener.Start();
        Running = true;
        while (Running)
        {
            var c = await listener.AcceptTcpClientAsync(cts.Token);
            var client = new Client(c);
            clients.Add(client);
            var clientTask = client.Run(); //don't await, so the loop can accept the next client
            clientTask.ContinueWith(t => clients.Remove(client)); //drop the client from the list when its session ends
        }
    }
}

class Client
{
    TcpClient client;
    NetworkStream stream;

    public Client(TcpClient client)
    {
        this.client = client;
        stream = client.GetStream();
        
    }

    public async Task Run()
    {
        var r = new StreamReader(stream);
        var w = new StreamWriter(stream);
        while (true)
        {
            await w.WriteLineAsync("You are standing in an open field west of a white house, with a boarded front door. There is a small mailbox here.");
            await w.WriteAsync(">");
            await w.FlushAsync();

            var l = await r.ReadLineAsync();
            await w.WriteLineAsync("Invalid command " + l);
        }
    }
}

Which you can test with

c:\> telnet localhost 1121
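
To stop it, something like this should work (a minimal sketch using the Stop() method above; the Console.ReadLine is just a stand-in for whatever shutdown trigger you use):

var svr = new Server("127.0.0.1");
var serverTask = svr.Run();

Console.WriteLine("Press Enter to stop the server.");
Console.ReadLine();

svr.Stop(); // cancels the pending AcceptTcpClientAsync via the CancellationTokenSource

try
{
    await serverTask;
}
catch (OperationCanceledException)
{
    // expected when Stop() cancels the accept
}
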
David Browne - Microsoft
  • Calling the reading task without await did the job, thanks! Is this approach better than a thread per client? – SelfishCrawler Jan 01 '22 at 19:53
  • 1
    Yes. It combines the simplicity of thread-per-client with the scalability of not having a large number of idle threads. – David Browne - Microsoft Jan 01 '22 at 19:54
  • When trying to compile this code, `var c = await listener.AcceptTcpClientAsync(cts.Token);` results in error `CS1501: No overload for method 'AcceptTcpClientAsync' takes 1 arguments` - VS 2019 / .NET 5 – Tu deschizi eu inchid Jan 01 '22 at 22:19
  • It's new in .NET 6, and .NET 5 is nearing end-of-support, so you should upgrade. For previous versions call `TcpListener.Stop()` to cancel, see e.g. https://stackoverflow.com/questions/12231789/cancel-blocking-accepttcpclient-call – David Browne - Microsoft Jan 01 '22 at 22:36
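
For older runtimes, that workaround might look roughly like this (a sketch reusing the Server fields from the answer; the exception filter reflects what a stopped listener typically surfaces):

public void Stop()
{
    Running = false;
    listener.Stop(); // makes the pending AcceptTcpClientAsync() throw
}

public async Task Run()
{
    listener = new(ipep);
    listener.Start();
    Running = true;
    try
    {
        while (Running)
        {
            var c = await listener.AcceptTcpClientAsync(); // no CancellationToken overload before .NET 6
            var client = new Client(c);
            clients.Add(client);
            var clientTask = client.Run(); //don't await
            clientTask.ContinueWith(t => clients.Remove(client));
        }
    }
    catch (Exception ex) when (!Running && (ex is ObjectDisposedException || ex is SocketException))
    {
        // expected: Stop() closed the listener to break out of the blocked accept
    }
}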