Hi,
After searching on Google, I tried the code below to read a large text file using a BackgroundWorker.
// Path of the file to load. NOTE(review): hard-coded; consider making this configurable.
const string dataFile = @"F:\test.txt";
// Holds the line most recently read by the worker. Shared instance state —
// only safe because a single BackgroundWorker touches it at a time.
string line;
// Builds the form: designer-generated controls first, then the
// BackgroundWorker event wiring.
public Form1()
{
InitializeComponent();
InitializeBackgroundWorker();
}
/// <summary>
/// Wires up the BackgroundWorker's lifecycle events and enables cancellation.
/// </summary>
private void InitializeBackgroundWorker()
{
    // CancelAsync() (see button2_Click) throws InvalidOperationException unless
    // WorkerSupportsCancellation is true. It may also be set in the designer —
    // setting it here is harmless and makes the code self-contained.
    backgroundWorker1.WorkerSupportsCancellation = true;

    // Method-group syntax: the delegate construction is implicit, so the
    // explicit "new XxxEventHandler(...)" wrappers are unnecessary.
    backgroundWorker1.DoWork += backgroundWorker1_DoWork;
    backgroundWorker1.RunWorkerCompleted += backgroundWorker1_RunWorkerCompleted;
    backgroundWorker1.ProgressChanged += backgroundWorker1_ProgressChanged_1;
}
// Starts the background read. Note: calling RunWorkerAsync while the worker
// is already busy throws InvalidOperationException — consider checking
// backgroundWorker1.IsBusy first.
private void button1_Click(object sender, EventArgs e)
{
backgroundWorker1.RunWorkerAsync();
}
/// <summary>
/// Reads the file line by line on the worker thread, honoring cancellation,
/// and hands the accumulated text back via <paramref name="e"/>.Result.
/// </summary>
/// <remarks>
/// WARNING: this still accumulates the ENTIRE file in memory, which is why a
/// 1.5 GB file throws OutOfMemoryException — no in-loop fix can change that.
/// For very large files, stream chunks to the UI (e.g. via ReportProgress)
/// or display a window into the file instead of the whole contents.
/// </remarks>
private void backgroundWorker1_DoWork(object sender, DoWorkEventArgs e)
{
    StringBuilder sb = new StringBuilder();
    using (FileStream fs = File.Open(dataFile, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
    using (BufferedStream bs = new BufferedStream(fs))
    using (StreamReader sr = new StreamReader(bs))
    {
        while ((line = sr.ReadLine()) != null)
        {
            // Honor a cancel request from button2_Click.
            if (backgroundWorker1.CancellationPending)
            {
                e.Cancel = true;
                return;
            }
            // AppendLine already appends a line terminator; the original
            // AppendLine(line + "\n") doubled every newline in the output.
            sb.AppendLine(line);
        }
    }
    // Assign the result once, after the read completes — the original
    // reassigned e.Result on every iteration for no benefit.
    e.Result = sb;
}
/// <summary>
/// Runs on the UI thread when the worker finishes, whether it completed,
/// was cancelled, or faulted. Checks e.Error FIRST: if DoWork threw (e.g.
/// the reported OutOfMemoryException), reading e.Result rethrows that
/// exception here — the original handler crashed on exactly that path.
/// </summary>
private void backgroundWorker1_RunWorkerCompleted(object sender, RunWorkerCompletedEventArgs e)
{
    if (e.Error != null)
    {
        // Surface the worker's exception instead of letting e.Result rethrow it.
        MessageBox.Show("Reading failed: " + e.Error.Message);
    }
    else if (e.Cancelled)
    {
        MessageBox.Show("You've cancelled the backgroundworker!");
    }
    else
    {
        richTextBox1.AppendText(e.Result.ToString());
        MessageBox.Show("Done");
    }
}
// Wired in InitializeBackgroundWorker but intentionally empty: DoWork never
// calls ReportProgress, so this handler is never raised. Implement it (and
// set WorkerReportsProgress = true) if progress reporting is added.
private void backgroundWorker1_ProgressChanged_1(object sender, ProgressChangedEventArgs e)
{
}
// Requests cooperative cancellation; DoWork polls CancellationPending and
// stops at the next line boundary. Requires WorkerSupportsCancellation = true
// (set in the designer or in code), otherwise this call throws.
private void button2_Click(object sender, EventArgs e)
{
backgroundWorker1.CancelAsync();//makes the backgroundworker stop
}
}
The above code works fine with files of 600 KB to 1 MB.
However, an OutOfMemoryException is thrown when I try to read data from a 1.5 GB text file. I have used the StreamReader.ReadLine() method since it is a large file, and from the exception thrown I can see why it failed. Is there any way to read a large file in chunks without data loss?
BR,
Arjun