var fsw = new FileSystemWatcher(sPath, "*.PPF");
fsw.NotifyFilter = NotifyFilters.FileName;
fsw.IncludeSubdirectories = true;
fsw.Created += FswCreated;
fsw.EnableRaisingEvents = true;
static void FswCreated(object sender, FileSystemEventArgs e)
{
    string sFile = e.FullPath;
    string[] arrLines = File.ReadAllLines(sFile);
}
This fails with large files, because the Created event fires as soon as the file appears, before the writing process has finished. The file is copied over the network, so I don't know its size in advance. What kind of synchronisation is required to make this robust?
Solution found on Stack Overflow and modified a bit. The idea is to probe the file with an exclusive open: as long as the copy is still in progress, the open fails with an IOException.
using System;
using System.IO;
using System.Threading;

static bool IsFileLocked(FileInfo file)
{
    FileStream stream = null;
    try
    {
        stream = file.Open(FileMode.Open, FileAccess.ReadWrite, FileShare.None);
    }
    catch (IOException)
    {
        // The file is unavailable because it is:
        // - still being written to,
        // - being processed by another thread,
        // - or does not exist (has already been processed).
        return true;
    }
    finally
    {
        if (stream != null)
            stream.Close();
    }
    // File is not locked
    return false;
}
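The Created handler then simply polls this check before reading the file: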
static void FswCreated(object sender, FileSystemEventArgs e)
{
    string sFile = e.FullPath;
    Console.WriteLine("processing file : " + sFile);

    // Wait while the file is still open
    FileInfo fileInfo = new FileInfo(sFile);
    while (IsFileLocked(fileInfo))
    {
        Thread.Sleep(500);
    }

    string[] arrLines = File.ReadAllLines(sFile);
}
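One caveat: IsFileLocked also returns true when the file no longer exists, so the loop above can spin forever if the copy is aborted or another consumer removes the file first. A bounded wait avoids that. Here is a minimal sketch; WaitUntilUnlocked is a hypothetical helper name, and the timeout and poll interval are assumptions, not values from the original answer.

static bool WaitUntilUnlocked(string path, TimeSpan timeout, TimeSpan pollInterval)
{
    DateTime deadline = DateTime.UtcNow + timeout;
    while (DateTime.UtcNow < deadline)
    {
        if (!File.Exists(path))
            return false; // copy was aborted, or the file was already processed
        if (!IsFileLocked(new FileInfo(path)))
            return true;  // exclusive open succeeded, so the writer is done
        Thread.Sleep(pollInterval);
    }
    return false; // still locked when the deadline passed
}

Used in the handler instead of the open-ended while loop:

if (WaitUntilUnlocked(sFile, TimeSpan.FromMinutes(5), TimeSpan.FromMilliseconds(500)))
{
    string[] arrLines = File.ReadAllLines(sFile);
}

Even then, the check-then-read is not atomic: another process can reopen the file between the probe and File.ReadAllLines, so wrapping the read itself in a try/catch for IOException remains the safer pattern.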