Parallel.For System.OutOfMemoryException

Posted by Martin Neal on Stack Overflow
Published on 2010-06-06T20:48:54Z

We have a fairly simple program that's used for creating backups. I'm attempting to parallelize it, but I'm getting an OutOfMemoryException inside an AggregateException. Some of the source folders are quite large, and the program doesn't crash until about 40 minutes after it starts. I don't know where to start looking, so the code below is a near-exact dump of all the code, sans the directory structure and the exception-logging code. Any advice as to where to start looking?
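For context, Parallel.For wraps whatever its iterations throw into a single AggregateException, which is why the OutOfMemoryException shows up inside one. A minimal sketch of pulling the inner exceptions out at the call site (the logging target here is illustrative, not part of the original program):

try
{
    Parallel.For(0, saDest.Length, i => { /* copy work */ });
}
catch (AggregateException ae)
{
    // Flatten() collapses any AggregateExceptions nested by the inner
    // Parallel.ForEach calls; each InnerException is an original fault.
    foreach (Exception inner in ae.Flatten().InnerExceptions)
        Console.Error.WriteLine(inner);
}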

using System;
using System.Diagnostics;
using System.IO;
using System.Threading.Tasks;

namespace SelfBackup
{
    class Program
    {
        static readonly string[] saSrc = {
            "\\src\\dir1\\",
            //...
            "\\src\\dirN\\", //this folder is over 6 GB
        };
        static readonly string[] saDest = {
            "\\dest\\dir1\\",
            //...
            "\\dest\\dirN\\",
        };

        static void Main(string[] args)
        {
            Parallel.For(0, saDest.Length, i =>
            {
                string sDest = saDest[i];
                try
                {
                    if (Directory.Exists(sDest))
                    {
                        //Delete the directory first so old stuff gets cleaned up
                        Directory.Delete(sDest, true);
                    }

                    //recursive function
                    clsCopyDirectory.copyDirectory(saSrc[i], sDest);
                }
                catch (Exception e)
                {
                    //standard error logging
                    CL.EmailError();
                }
            });
        }
    }
}
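Worth noting while reading the code: the outer Parallel.For starts one copy per source/destination pair, and copyDirectory (second file, below) adds another Parallel.ForEach at every directory level, so with blocking File.Copy calls the thread pool may keep injecting worker threads. Purely as a sketch (the cap value is an assumption, not from the original program), ParallelOptions can bound each loop individually:

// Sketch only: MaxDegreeOfParallelism limits each parallel loop on its
// own, and the value here is an assumption chosen for illustration.
var opts = new ParallelOptions { MaxDegreeOfParallelism = Environment.ProcessorCount };

Parallel.For(0, saDest.Length, opts, i =>
{
    // ...same body as above...
});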

///////////////////////////////////////
using System.IO;
using System.Threading.Tasks;

namespace SelfBackup
{
    static class clsCopyDirectory
    {
        static public void copyDirectory(string Src, string Dst)
        {
            Directory.CreateDirectory(Dst);

            /* Copy all the files in the folder.
               If and when .NET 4.0 is installed, change
               Directory.GetFiles to Directory.EnumerateFiles for
               slightly better performance. */
            Parallel.ForEach<string>(Directory.GetFiles(Src), file =>
            {
                /* An exception thrown here may be arbitrarily deep into
                   this recursive function. There's also a good chance that
                   if one copy fails here, other files in the same directory
                   will fail too, so we don't want to spam out hundreds of
                   error e-mails, but we don't want to abort altogether
                   either. Instead, the best solution is probably to throw
                   back up to the original caller of copyDirectory and move
                   on to the next Src/Dst pair by not catching any possible
                   exception here. */
                File.Copy(file, //src
                          Path.Combine(Dst, Path.GetFileName(file)), //dest
                          true); //bool overwrite
            });

            //Call this function again for every directory in the folder.
            Parallel.ForEach(Directory.GetDirectories(Src), dir =>
            {
                copyDirectory(dir, Path.Combine(Dst, Path.GetFileName(dir)));
            });
        }
    }
}
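On the comment in copyDirectory: Directory.EnumerateFiles shipped in .NET 4.0, the same release that provides Parallel.ForEach, so it should already be available wherever this code compiles. A sketch of the inner loop using it (behavior otherwise unchanged); EnumerateFiles yields names lazily instead of materializing the full string[] up front, which may reduce memory pressure on very large directories:

// Lazy enumeration: file names are produced as the loop consumes them,
// rather than all loaded at once by Directory.GetFiles.
Parallel.ForEach(Directory.EnumerateFiles(Src), file =>
{
    File.Copy(file, Path.Combine(Dst, Path.GetFileName(file)), true);
});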
