I just made a working Get method for Orders combining two methods:

  1. This method gets all orders and the data associated with them:

        private async Task<(List<Order>, int)> GetOrdersWithDataAsync(NpgsqlConnection connection, NpgsqlTransaction transaction, OrderFilter filtering, Sorting sorting)
    {
        List<Order> orders = new List<Order>();
        int totalCount = 0;
    
        StringBuilder cmdBuilder = new StringBuilder("select o.\"Id\", o.\"ShippingAddressId\", o.\"BillingAddressId\", o.\"PersonId\", o.\"TotalPrice\", " +
                        " pe.\"Id\" as \"PersonId\", pe.\"FirstName\", pe.\"LastName\", pe.\"Email\", sa.\"Id\" as \"ShippingAddressId\" sa.\"StreetName\" as \"ShippingStreetName\", " +
                        "sa.\"StreetNumber\" as \"ShippingStreetNumber\",sa.\"City\" as \"ShippingCity\", sa.\"Zipcode\" as \"ShippingZipcode\", " +
                        "ba.\"Id\" as \"BillingAddressId\", ba.\"StreetName\" as \"BillingStreetName\", ba.\"StreetNumber\" as \"BillingStreetNumber\", ba.\"City\" as \"BillingCity\", " +
                        "ba.\"Zipcode\" as \"BillingZipcode\", Count(*) Over() as \"TotalCount\" from \"Order\" o " +
                        "inner join \"Person\" pe on o.\"PersonId\" = pe.\"Id\" inner join \"Address\" sa on sa.\"Id\" = o.\"ShippingAddressId\" inner join \"Address\" ba on ba.\"Id\" = " +
                        "o.\"BillingAddressId\"");
    
        NpgsqlCommand cmd = new NpgsqlCommand("" , connection);
    
    
        cmdBuilder.Append(" WHERE 1 = 1");
        if(!string.IsNullOrEmpty(filtering.SearchQuery))
        {
            cmdBuilder.Append(" AND pe.\"FirstName\" LIKE \'%@SearchQuery%\' OR pe.\"LastName\" LIKE \'%@SearchQuery%\'");
            cmd.Parameters.AddWithValue("@SearchQuery", filtering.SearchQuery);
        } 
        if(filtering.MinPrice.HasValue && filtering.MinPrice.Value != 0)
        {
            cmdBuilder.Append(" AND o.\"TotalPrice\" > @MinPrice");
            cmd.Parameters.AddWithValue("@MinPrice", filtering.MinPrice);
        }
        if (filtering.MaxPrice.HasValue && filtering.MaxPrice.Value != 0)
        {
            cmdBuilder.Append(" AND o.\"TotalPrice\" < @MaxPrice");
            cmd.Parameters.AddWithValue("@MaxPrice", filtering.MaxPrice);
        }
    
        cmd.CommandText = cmdBuilder.ToString();
        cmd.Transaction = transaction;
        cmd.Connection = connection;
    
        NpgsqlDataReader reader = await cmd.ExecuteReaderAsync();
        try
        {
            using (reader)
            {
                if (reader.HasRows)
                {
                    while (reader.Read())
                    {
                        Guid orderId = (Guid)reader["Id"];
    
                        Order order = new Order();
                        order.ShippingAddress = new Address();
                        order.BillingAddress = new Address();
                        order.Products = new List<Product>();
    
                        order.Id = (Guid)reader["Id"];
                        order.TotalPrice = (decimal)reader["TotalPrice"];
                        order.Person = new Person();
                        order.Person.Id = (Guid)reader["PersonId"];
                        order.Person.FirstName = (string)reader["FirstName"];
                        order.Person.LastName = (string)reader["LastName"];
                        order.Person.Email = (string)reader["Email"];
                        order.PersonId = (Guid)reader["PersonId"];
                        order.ShippingAddress.Id = (Guid)reader["ShippingAddressId"];
                        order.ShippingAddress.StreetName = (string)reader["ShippingStreetName"];
                        order.ShippingAddress.StreetNumber = (string)reader["ShippingStreetNumber"];
                        order.ShippingAddress.City = (string)reader["ShippingCity"];
                        order.ShippingAddress.ZipCode = (int)reader["ShippingZipcode"];
                        order.BillingAddress.Id = (Guid)reader["BillingAddressId"];
                        order.BillingAddress.StreetName = (string)reader["BillingStreetName"];
                        order.BillingAddress.StreetNumber = (string)reader["BillingStreetNumber"];
                        order.BillingAddress.City = (string)reader["BillingCity"];
                        order.BillingAddress.ZipCode = (int)reader["BillingZipcode"];
    
                        orders.Add(order);
                    }
                }
                return (orders, totalCount);
            }
        }
        catch(Exception ex)
        {
            throw ex;
        }
    
    }
    
  2. A method that gets products for each order from the product repository, which I injected into the OrderRepository:

        public async Task<List<Order>> GetProductsByOrderIdAsync(List<Order> orders, NpgsqlConnection connection, NpgsqlTransaction transaction)
    {
        foreach (var order in orders)
        {
            List<Product> products = new List<Product>();
    
            try
            {
                NpgsqlCommand cmd = new NpgsqlCommand("SELECT p.\"Id\", p.\"Name\", p.\"Price\", po.\"ProductQty\" as \"Quantity\" FROM \"Product\" as p INNER JOIN \"ProductOrder\" as po ON p.\"Id\" = po.\"ProductId\" WHERE po.\"OrderId\" = @id", connection);
                cmd.Parameters.AddWithValue("id", order.Id);
                cmd.Transaction = transaction;
                cmd.Connection = connection;
    
                NpgsqlDataReader reader = await cmd.ExecuteReaderAsync();
                if (reader.HasRows) 
                {
                    while (reader.Read())
                    {
                        Product product = new Product();
                        product.Id = (Guid)reader["Id"];
                        product.Name = (string)reader["Name"];
                        product.Price = (decimal)reader["Price"];
                        product.Quantity = (int)reader["Quantity"];
                        products.Add(product);
                    }
                }
                reader.Close();
            }
            catch (Exception ex)
            {
                throw ex;
            }
            order.Products = products;
        }
    
        return orders;
    }
    

Both of these are done through this one:

        public async Task<PagedList<Order>> GetOrdersAsync(Paginate paginate, OrderFilter filtering, Sorting sorting)
    {
        NpgsqlConnection connection = new NpgsqlConnection(_connectionProvider.GetConnectionString());
        await connection.OpenAsync();

        try
        {
            using (connection)
            {
                NpgsqlTransaction transaction = connection.BeginTransaction();

                try
                {
                    (List<Order> orders, int totalCount) = await GetOrdersWithDataAsync(connection, transaction, filtering, sorting);

                    List<Order> ordersWithProducts = await _productRepository.GetProductsByOrderIdAsync(orders, connection, transaction);

                    PagedList<Order> pagedOrders = new PagedList<Order>()
                    {
                        Results = ordersWithProducts,
                        CurrentPage = paginate.PageNumber,
                        PageSize = paginate.PageSize,
                        TotalCount = orders.Count
                    };

                    if (pagedOrders != null && pagedOrders.TotalCount > 0)
                    {
                        transaction.Commit();
                        return pagedOrders;
                    }
                }
                catch (Exception ex)
                {
                    transaction.Rollback();
                    throw ex;
                }
            }
        }
        catch (Exception ex)
        {
            throw ex;
        }

        return null;
    }

The thing that worries me is the separate select statement issued for each order in the GetProductsByOrderId method; is there a better way to do this? I already did it using only one query in the OrderRepository, by checking for an existing order and adding the product to it, but that seems slow as well.

Also, would it be better to use two separate transactions and roll back the first one if the second one (for products) fails?

  • You don't need a transaction when you're *retrieving* data. If you're not modifying data, there's nothing to commit or rollback. – madreflection Jul 28 '23 at 13:58
  • You can use IN clause and do it in a single query. In the code you need to handle logic to get order IDs. – Max Naumov Jul 28 '23 at 13:58
  • I was told to use transactions to be safe, if something breaks, it should just return an exception? Ok, I'll check how the IN clause works – PhantomPhreak1995 Jul 28 '23 at 14:04
  • Yes, use transactions to be safe, but use transactions only when they make sense, which is modifying data, not retrieving it. They don't do anything for you when you're retrieving data, and it may be costly to start a transaction because it has to allocate resources for tracking changes. If no changes are being made, they were allocated for nothing; wasted. It's important to understand the advice you were given. Don't just use it blindly. – madreflection Jul 28 '23 at 14:17
  • Alright, so if I were to use my business layer to get orders and then get products for them, I would need to use two transactions so I can rollback the first one in case something goes wrong with fetching the products? I can see it being used in that case – PhantomPhreak1995 Jul 28 '23 at 14:36
  • @madreflection Not really true. If you retrieve data in two separate queries and want that data to be consistent then you need a transaction. Equally, if you are modifying data but doing it in a single statement then you don't need a transaction. Every statement runs in an atomic transaction anyway. – Charlieface Jul 28 '23 at 14:39
  • `" AND pe.\"FirstName\" LIKE \'%@SearchQuery%\' OR pe.\"LastName\" LIKE \'%@SearchQuery%\'"` isn't going to work. Parameters are not embedded into the statement, they are *bound* and in this case the string `%@SearchQuery%` is just a string. Instead you need `" AND pe.FirstName LIKE '%' || @SearchQuery || '%' OR pe."LastName" LIKE '%' || @SearchQuery || '%'"` – Charlieface Jul 28 '23 at 14:41
  • @Charlieface: Unless you have the isolation level set to dirty read, then as long as you make multiple related changes in transactions, you'll get the consistency without the transaction. If you switch to dirty read, it could read tables in the middle of a transaction, between modifications, and get one table's modified data and another table's not-yet modified data. – madreflection Jul 28 '23 at 14:43
  • @madreflection And what happens if two *different* users make two *unrelated* modifications? You will still get inconsistent results. It's just not true that read queries don't need transactions, it's just that in this particular case it's all a single query anyway. – Charlieface Jul 28 '23 at 14:45
  • Thank you @Charlieface for pointing out the SearchQuery mistake, I was gonna get to that :D That was the point, I agree that sometimes you gotta use two transactions or one, but I can also see how it is pointless here. I'm just gonna create two transactions and connect data from products with orders in the business layer :D If I want a scalable app, can't go wrong with that, right? – PhantomPhreak1995 Jul 28 '23 at 15:26
  • A transaction will not capture (checkpoint) the state of tables at the beginning of the transaction. If something is modified between two SELECTs, even if those SELECTs are within a transaction, the second will get the newly committed data. You would need to lock the tables before the SELECTs, which would block any transactions that modify them from being committed until the SELECTs are completed, and then unlock them when they're done. Whether you did that explicitly, or the transaction did it implicitly, it would kill performance. – madreflection Jul 28 '23 at 15:48
  • @PhantomPhreak1995: Use zero transactions here. Think about it: what do you do when you're done with a transaction that only queries data? Commit? Rollback? There are no changes to either write to the table(s) or abandon. It makes no sense. – madreflection Jul 28 '23 at 15:50
  • @madreflection ok, I deleted the transaction part, but the IN clause recommended by Charlieface is what I needed also, gonna try that and abstract these two methods into the business layer - using NO transactions :D – PhantomPhreak1995 Jul 28 '23 at 16:14
  • I'm not sure how Postgres deals with execution plans, but I know SQL Server would be hit a bit hard the way you're doing it because it would have to build a separate execution plan for every single variation of parameters provided (i.e. every distinct `CommandText` value). I would use a single query, use conditions like `(@MinPrice IS NULL OR o.TotalPrice > @MinPrice)` and so on, and add the parameter as `DBNull.Value` when the criterion is not included. – madreflection Jul 28 '23 at 16:22
  • @madreflection That's likely to be worse off, even for SQL Server. It's usually *better* to get a new execution plan per combination of conditions (as long as you parameterize those conditions); there will be no more than 8 different execution plans in this case. Whereas bunging it all into one is likely to cause poor execution plans as well as parameter sniffing problems. See https://www.sentryone.com/blog/aaronbertrand/backtobasics-updated-kitchen-sink-example – Charlieface Jul 28 '23 at 17:43
  • @madreflection That's true under `READ COMMITTED`, but under `SERIALIZABLE` you may be joining the same tables again and will therefore get the correct consistency (either before the commit or after the commit). Under `SNAPSHOT` you would indeed get the state of the tables as they were at the beginning of the transaction. Without any transaction you have almost no guarantee of anything. – Charlieface Jul 28 '23 at 17:47
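
To make the IN-clause suggestion from the comments concrete: Npgsql maps a .NET Guid[] parameter to a Postgres uuid[], so the products for a whole page of orders can be fetched in one round trip with = ANY(@ids) and matched back through a dictionary. A minimal sketch, assuming the Order/Product classes and the Product/ProductOrder tables from the question (FillProductsAsync is an invented helper name, and LINQ is used for ToDictionary/Select):

private async Task FillProductsAsync(List<Order> orders, NpgsqlConnection connection)
{
    // Index the orders so each product row can be attached to its parent in O(1).
    Dictionary<Guid, Order> byId = orders.ToDictionary(o => o.Id);
    foreach (Order o in orders)
        o.Products = new List<Product>();

    using NpgsqlCommand cmd = new NpgsqlCommand(
        "SELECT p.\"Id\", p.\"Name\", p.\"Price\", po.\"ProductQty\" AS \"Quantity\", po.\"OrderId\" " +
        "FROM \"Product\" p INNER JOIN \"ProductOrder\" po ON p.\"Id\" = po.\"ProductId\" " +
        "WHERE po.\"OrderId\" = ANY(@ids)", connection);
    // Npgsql sends the Guid[] as uuid[], which = ANY(...) can filter on.
    cmd.Parameters.AddWithValue("ids", orders.Select(o => o.Id).ToArray());

    using NpgsqlDataReader reader = await cmd.ExecuteReaderAsync();
    while (await reader.ReadAsync())
    {
        Product product = new Product
        {
            Id = (Guid)reader["Id"],
            Name = (string)reader["Name"],
            Price = (decimal)reader["Price"],
            Quantity = (int)reader["Quantity"]
        };
        byId[(Guid)reader["OrderId"]].Products.Add(product);
    }
}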

1 Answer


It's probably much better to just do one batch, returning just two resultsets, rather than doing effectively N+1 queries (N being the number of orders you get).

Simply fetch all the Orders and store them in a dictionary, then fetch the second resultset and map the Products into the Order using the dictionary to retrieve the relevant object.

You are also not actually doing any pagination. Consider using Keyset Pagination rather than Rowset Pagination: you pass in a starting key (such as an OrderId) and page forward from there, rather than doing the looks-cool-but-really-slow OFFSET...FETCH. See also "Is there any better option to apply pagination without applying OFFSET in SQL Server?". I haven't wired it into the rewritten method below, but it should fit right in to your other dynamic filter building; a rough sketch of the idea follows.
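
As a rough, untested sketch of that idea, reusing the dynamic conditions string and cmd from the rewritten method further down (LastSeenId is an invented property on the paginate parameter holding the last Id of the previous page):

// Keyset pagination sketch: the rows must be ordered by a stable, indexed key.
// A Guid Id gives a stable but not meaningful order; in practice you would page
// on something like (CreatedAt, Id). paginate.LastSeenId is hypothetical.
if (paginate.LastSeenId.HasValue)
{
    conditions += @"
  AND o.Id > @LastSeenId";
    cmd.Parameters.Add("@LastSeenId", NpgsqlDbType.Uuid).Value = paginate.LastSeenId.Value;
}

// ...and the first SELECT would end with:
//   order by o.Id
//   limit @PageSize
cmd.Parameters.Add("@PageSize", NpgsqlDbType.Integer).Value = paginate.PageSize;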

  • Improve readability of your code:
    • Use verbatim strings @"" so you can embed newlines.
    • Use object initializer syntax.
    • Format your SQL properly.
    • Don't quote column names unless necessary.
    • Consider using a mapper such as Dapper to actually convert each row to an object.
  • catch ... throw ex; is just plain wrong. Catching only to re-throw is a waste of time anyway, and throw ex; will wipe the stack trace. At the very least use just throw;, or better, don't catch at all if you don't want to handle it (see the short sketch after this list).
  • Specify the types of the parameters explicitly.
  • if (reader.HasRows) is unnecessary, the loop won't run at all if there are no rows.
  • You are not setting totalCount, but you don't need it anyway as the list itself has a Count.
  • Use the async version, await reader.ReadAsync(). Consider also passing in a cancellation token.
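
To illustrate the throw; point above (DoWorkAsync and _logger are invented names, only there to show the shape):

try
{
    await DoWorkAsync();
}
catch (Exception ex)
{
    _logger.LogError(ex, "DoWorkAsync failed"); // only catch if you add something of value here
    throw;       // rethrows and keeps the original stack trace
    // throw ex; // would reset the stack trace to this line
}

With those points applied, the order query method could look like this:
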
private async Task<ICollection<Order>> GetOrdersWithDataAsync(OrderFilter filtering, Sorting sorting)
{
    using NpgsqlConnection connection = new NpgsqlConnection(_connectionProvider.GetConnectionString());
    using NpgsqlCommand cmd = new NpgsqlCommand("", connection);

    var conditions = "WHERE 1 = 1";
    if(!string.IsNullOrEmpty(filtering.SearchQuery))
    {
        conditions += @"
  AND pe.FirstName LIKE '%' + @SearchQuery+ '%' OR pe.LastName LIKE '%' + @SearchQuery+ '%'");
        cmd.Parameters.Add("@SearchQuery", NpgsqlDbType.Text).Value = filtering.SearchQuery;
    } 
    if(filtering.MinPrice != 0)
    {
        conditions += @"
  AND o.TotalPrice > @MinPrice");
        cmd.Parameters.Add("@MinPrice", NpgsqlDbType.Numeric).Value = filtering.MinPrice.Value;
    }
    if (filtering.MaxPrice != 0)
    {
        conditions += @"
  AND o.TotalPrice < @MaxPrice");
        cmd.Parameters.Add("@MaxPrice", NpgsqlDbType.Numeric).Value filtering.MaxPrice;
    }

    cmd.CommandText = @$"
select
  o.Id,
  o.ShippingAddressId,
  o.BillingAddressId,
  o.PersonId,
  o.TotalPrice,
  pe.Id as PersonId,
  pe.FirstName,
  pe.LastName,
  pe.Email,
  sa.Id as ShippingAddressId,
  sa.StreetName as ShippingStreetName,
  sa.StreetNumber as ShippingStreetNumber,
  sa.City as ShippingCity,
  sa.Zipcode as ShippingZipcode,
  ba.Id as BillingAddressId,
  ba.StreetName as BillingStreetName,
  ba.StreetNumber as BillingStreetNumber,
  ba.City as BillingCity,
  ba.Zipcode as BillingZipcode
from ""Order"" o
inner join Person pe on o.PersonId = pe.Id
inner join Address sa on sa.Id = o.ShippingAddressId
inner join Address ba on ba.Id = o.BillingAddressId
{conditions}
;

SELECT
  p.Id,
  p.Name,
  p.Price,
  po.ProductQty as Quantity,
  po.OrderId
FROM Product as p
JOIN ProductOrder as po ON p.Id = po.ProductId
JOIN ""Order"" o ON o.Id = po.OrderId
inner join Person pe on o.PersonId = pe.Id
{conditions}
";

    var orders = new Dictionary<Guid, Order>();

    await connection.OpenAsync();
    using NpgsqlDataReader reader = await cmd.ExecuteReaderAsync();

    while (await reader.ReadAsync())
    {
        Order order = new Order {
            Products = new List<Product>(),
            Id = (Guid)reader["Id"],
            TotalPrice = (decimal)reader["TotalPrice"],
            Person = new Person {
                Id = (Guid)reader["PersonId"],
                FirstName = (string)reader["FirstName"],
                LastName = (string)reader["LastName"],
                Email = (string)reader["Email"],
            },
            PersonId = (Guid)reader["PersonId"],
            ShippingAddress = new Address {
                Id = (Guid)reader["ShippingAddressId"],
                StreetName = (string)reader["ShippingStreetName"],
                StreetNumber = (string)reader["ShippingStreetNumber"],
                City = (string)reader["ShippingCity"],
                ZipCode = (int)reader["ShippingZipcode"],
            },
            BillingAddress = new Address {
                Id = (Guid)reader["BillingAddressId"],
                StreetName = (string)reader["BillingStreetName"],
                StreetNumber = (string)reader["BillingStreetNumber"],
                City = (string)reader["BillingCity"],
                ZipCode = (int)reader["BillingZipcode"],
            },
        };

        orders.Add(order.Id, order);
    }

    await reader.NextResultAsync();

    while (await reader.ReadAsync())
    {
        Product product = new Product {
            Id = (Guid)reader["Id"],
            Name = (string)reader["Name"],
            Price = (decimal)reader["Price"],
            Quantity = (int)reader["Quantity"],
        };
        if (orders.TryGetValue((Guid)reader["OrderId"], out Order order))
            order.Products.Add(product);
    }

    return orders.Values;
}

A similar option might be to use Postgres' JSON functionality to construct a JSON array of Products. This means you can do it all in one batch. I'm not sure of the exact way to get the data out of that using Npgsql, but the Postgres syntax would use something like JSON_AGG aggregation in a subquery; a rough sketch follows.
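
A rough, untested sketch of that idea, assuming Npgsql hands the json column back as text and System.Text.Json can deserialize it into the question's Product class:

// Untested sketch: one row per order, products aggregated into a JSON array in SQL.
cmd.CommandText = @"
select
  o.""Id"",
  o.""TotalPrice"",
  coalesce((
    select json_agg(json_build_object(
             'Id', p.""Id"",
             'Name', p.""Name"",
             'Price', p.""Price"",
             'Quantity', po.""ProductQty""))
    from ""Product"" p
    join ""ProductOrder"" po on po.""ProductId"" = p.""Id""
    where po.""OrderId"" = o.""Id""
  ), '[]'::json) as ""Products""
from ""Order"" o";

using NpgsqlDataReader reader = await cmd.ExecuteReaderAsync();
while (await reader.ReadAsync())
{
    Order order = new Order
    {
        Id = (Guid)reader["Id"],
        TotalPrice = (decimal)reader["TotalPrice"],
        // Npgsql returns json as a string; the keys match the Product property names.
        Products = JsonSerializer.Deserialize<List<Product>>((string)reader["Products"])
    };
    // ...map the remaining columns as in the code above.
}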

Charlieface
  • You won't believe it, but that's exactly what I did yesterday afternoon! :D Well, to be honest I used the business layer to go to the OrderRepo, fetch those, and then I sent the Guids to the ProductRepo, made a dictionary and just inserted the List into every order! I know my error handling doesn't exist, gonna check now how to handle them - but your answer was terrific! Thank you for the advice, going to go through it after handling errors. Especially the one about pagination, I used OFFSET but I've been searching for something better also :D Once again, thank you! – PhantomPhreak1995 Jul 30 '23 at 14:11