I have to transfer files from FTP to Azure File Storage. My code works fine, but it transfers the files through memory, which is not good practice: I first read the FTP response stream into a byte array, and then I upload that array to Azure File Storage.
I know it would be better to do this asynchronously, but I don't know whether that is possible or how to do it.
My code:
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Microsoft.WindowsAzure.Storage;
using System.Configuration;
using Microsoft.WindowsAzure.Storage.File;
using System.IO;
using Microsoft.Azure;
using System.Net;

namespace TransferFtpToAzure
{
    class Program
    {
        public static void Main(string[] args)
        {
            List<FileName> sourceFileList = new List<FileName>();
            List<FileName> targetFileList = new List<FileName>();

            string targetShareReference = ConfigurationManager.AppSettings["AzureShare"];
            string targetDirectoryReference = ConfigurationManager.AppSettings["Environment"] + "/" + Enums.AzureFolders.Mos + "/" + Enums.AzureFolders.In;

            string sourceURI = (ConfigurationManager.AppSettings["FtpConnectionString"] + ConfigurationManager.AppSettings["Environment"].ToUpper() + "/" + Enums.FtpFolders.Mos + "/").Replace("\\", "/");
            string sourceUser = ConfigurationManager.AppSettings["FtpServerUserName"];
            string sourcePass = ConfigurationManager.AppSettings["FtpServerPassword"];

            getFileLists(sourceURI, sourceUser, sourcePass, sourceFileList, targetShareReference, targetDirectoryReference, targetFileList);

            Console.WriteLine(sourceFileList.Count + " files found!");

            // Sort before CheckLists: BinarySearch only works on a sorted list.
            targetFileList.Sort();
            CheckLists(sourceFileList, targetFileList);

            Console.WriteLine(sourceFileList.Count + " unique files on sourceURI" + Environment.NewLine + "Attempting to move them.");

            foreach (var file in sourceFileList)
            {
                try
                {
                    CopyFile(file.fName, sourceURI, sourceUser, sourcePass, targetShareReference, targetDirectoryReference);
                }
                catch
                {
                    Console.WriteLine("There was a move error with: " + file.fName);
                }
            }
        }

        public class FileName : IComparable<FileName>
        {
            public string fName { get; set; }

            public int CompareTo(FileName other)
            {
                return fName.CompareTo(other.fName);
            }
        }

        public static void CheckLists(List<FileName> sourceFileList, List<FileName> targetFileList)
        {
            // Remove from the source list every file that already exists on the target.
            for (int i = 0; i < sourceFileList.Count; i++)
            {
                if (targetFileList.BinarySearch(sourceFileList[i]) >= 0)
                {
                    sourceFileList.RemoveAt(i);
                    i--;
                }
            }
        }

        public static void getFileLists(string sourceURI, string sourceUser, string sourcePass, List<FileName> sourceFileList, string targetShareReference, string targetDirectoryReference, List<FileName> targetFileList)
        {
            string line = "";

            ///////// Source FileList (FTP)
            FtpWebRequest sourceRequest;
            sourceRequest = (FtpWebRequest)WebRequest.Create(sourceURI);
            sourceRequest.Credentials = new NetworkCredential(sourceUser, sourcePass);
            sourceRequest.Method = WebRequestMethods.Ftp.ListDirectory;
            sourceRequest.UseBinary = true;
            sourceRequest.KeepAlive = false;
            sourceRequest.Timeout = -1;
            sourceRequest.UsePassive = true;

            FtpWebResponse sourceResponse = (FtpWebResponse)sourceRequest.GetResponse();

            // Creates a list (sourceFileList) of the file names.
            using (Stream responseStream = sourceResponse.GetResponseStream())
            {
                using (StreamReader reader = new StreamReader(responseStream))
                {
                    line = reader.ReadLine();
                    while (line != null)
                    {
                        var fileName = new FileName
                        {
                            fName = line
                        };
                        sourceFileList.Add(fileName);
                        line = reader.ReadLine();
                    }
                }
            }

            ///////////// Target FileList (Azure File Storage)
            CloudStorageAccount storageAccount = CloudStorageAccount.Parse(CloudConfigurationManager.GetSetting("StorageConnectionString"));
            CloudFileClient fileClient = storageAccount.CreateCloudFileClient();
            //var test = fileClient.ListShares();
            CloudFileShare fileShare = fileClient.GetShareReference(targetShareReference);
            if (fileShare.Exists())
            {
                CloudFileDirectory rootDirectory = fileShare.GetRootDirectoryReference();
                if (rootDirectory.Exists())
                {
                    CloudFileDirectory customDirectory = rootDirectory.GetDirectoryReference(targetDirectoryReference);
                    if (customDirectory.Exists())
                    {
                        var fileCollection = customDirectory.ListFilesAndDirectories().OfType<CloudFile>();
                        foreach (var item in fileCollection)
                        {
                            var fileName = new FileName
                            {
                                fName = item.Name
                            };
                            targetFileList.Add(fileName);
                        }
                    }
                }
            }
        }

        public static void CopyFile(string fileName, string sourceURI, string sourceUser, string sourcePass, string targetShareReference, string targetDirectoryReference)
        {
            try
            {
                FtpWebRequest request = (FtpWebRequest)WebRequest.Create(sourceURI + fileName);
                request.Method = WebRequestMethods.Ftp.DownloadFile;
                request.Credentials = new NetworkCredential(sourceUser, sourcePass);

                using (FtpWebResponse response = (FtpWebResponse)request.GetResponse())
                using (Stream responseStream = response.GetResponseStream())
                {
                    // The whole file is buffered in memory here before it is uploaded.
                    Upload(fileName, ToByteArray(responseStream), targetShareReference, targetDirectoryReference);
                }
            }
            catch
            {
                Console.WriteLine("There was an error with: " + fileName);
            }
        }

        public static Byte[] ToByteArray(Stream stream)
        {
            MemoryStream ms = new MemoryStream();
            byte[] chunk = new byte[4096];
            int bytesRead;
            while ((bytesRead = stream.Read(chunk, 0, chunk.Length)) > 0)
            {
                ms.Write(chunk, 0, bytesRead);
            }
            return ms.ToArray();
        }

        public static bool Upload(string FileName, byte[] Image, string targetShareReference, string targetDirectoryReference)
        {
            try
            {
                CloudStorageAccount storageAccount = CloudStorageAccount.Parse(CloudConfigurationManager.GetSetting("StorageConnectionString"));
                CloudFileClient fileClient = storageAccount.CreateCloudFileClient();
                //var test = fileClient.ListShares();
                CloudFileShare fileShare = fileClient.GetShareReference(targetShareReference);
                if (fileShare.Exists())
                {
                    CloudFileDirectory rootDirectory = fileShare.GetRootDirectoryReference();
                    if (rootDirectory.Exists())
                    {
                        CloudFileDirectory customDirectory = rootDirectory.GetDirectoryReference(targetDirectoryReference);
                        if (customDirectory.Exists())
                        {
                            var cloudFile = customDirectory.GetFileReference(FileName);
                            using (var stream = new MemoryStream(Image, writable: false))
                            {
                                cloudFile.UploadFromStream(stream);
                            }
                        }
                    }
                }
                return true;
            }
            catch
            {
                return false;
            }
        }
    }
}
If I understand you correctly, you want to avoid storing the file in memory between the download and upload.
For that, see: Azure function to copy files from FTP to blob storage.
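In the same spirit, here is a minimal sketch of how the question's CopyFile/Upload pair could be collapsed into one streaming copy, so the file is never materialised as a byte array. It assumes the same usings as the question and a CloudFileDirectory that already points at the target folder; StreamFile, sizeRequest and downloadRequest are illustrative names, and error handling and directory checks are omitted. Azure File Storage needs the final size when the file is created, so the size is requested from the FTP server first.

// Sketch only: stream one file from FTP straight into an Azure file, no byte[] in between.
public static void StreamFile(string fileName, string sourceURI, string sourceUser, string sourcePass, CloudFileDirectory customDirectory)
{
    // 1. Ask the FTP server for the file size (Azure Files requires it when creating the file).
    FtpWebRequest sizeRequest = (FtpWebRequest)WebRequest.Create(sourceURI + fileName);
    sizeRequest.Method = WebRequestMethods.Ftp.GetFileSize;
    sizeRequest.Credentials = new NetworkCredential(sourceUser, sourcePass);

    long size;
    using (FtpWebResponse sizeResponse = (FtpWebResponse)sizeRequest.GetResponse())
    {
        size = sizeResponse.ContentLength;
    }

    // 2. Download and upload at the same time: copy the FTP stream into the Azure file stream.
    FtpWebRequest downloadRequest = (FtpWebRequest)WebRequest.Create(sourceURI + fileName);
    downloadRequest.Method = WebRequestMethods.Ftp.DownloadFile;
    downloadRequest.Credentials = new NetworkCredential(sourceUser, sourcePass);

    CloudFile cloudFile = customDirectory.GetFileReference(fileName);

    using (FtpWebResponse response = (FtpWebResponse)downloadRequest.GetResponse())
    using (Stream ftpStream = response.GetResponseStream())
    using (var azureStream = cloudFile.OpenWrite(size))
    {
        // Only CopyTo's small internal buffer is held in memory.
        ftpStream.CopyTo(azureStream);
    }
}

In the question's Main this would replace the CopyFile call, and the ToByteArray and Upload helpers would no longer be needed.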
Using an Azure Storage File Share, this was the only way that worked for me without loading the entire ZIP into memory. I tested it with a 3 GB ZIP file (containing thousands of files, or one big file) and memory/CPU usage stayed low and stable. I hope it helps!
// _directory is a CloudFileDirectory that points at the share folder holding the ZIP files.
var zipFiles = _directory.ListFilesAndDirectories()
    .OfType<CloudFile>()
    .Where(x => x.Name.ToLower().Contains(".zip"))
    .ToList();

foreach (var zipFile in zipFiles)
{
    // ZipArchive reads entries lazily from the share stream, so the ZIP is never loaded whole.
    using (var zipArchive = new ZipArchive(zipFile.OpenRead()))
    {
        foreach (var entry in zipArchive.Entries)
        {
            if (entry.Length > 0)
            {
                CloudFile extractedFile = _directory.GetFileReference(entry.Name);

                using (var entryStream = entry.Open())
                {
                    byte[] buffer = new byte[16 * 1024];

                    // OpenWrite(entry.Length) pre-sizes the target file and returns a write stream;
                    // only the 16 KB buffer is held in memory at any time.
                    using (var outputStream = extractedFile.OpenWrite(entry.Length))
                    {
                        int read;
                        while ((read = entryStream.Read(buffer, 0, buffer.Length)) > 0)
                        {
                            outputStream.Write(buffer, 0, read);
                        }
                    }
                }
            }
        }
    }
}
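Regarding the asynchronous part of the question: the same streaming copy can be awaited end to end. This is a hedged sketch only, using the async counterparts available in .NET 4.5+ and the WindowsAzure.Storage SDK (GetResponseAsync, OpenWriteAsync, CopyToAsync); StreamFileAsync and its parameters are illustrative, and the target file size is assumed to be known up front (for example via a GetFileSize request as in the earlier sketch).

// Sketch only: async FTP-to-Azure-Files copy without buffering the file in memory.
public static async Task StreamFileAsync(string fileName, string sourceURI, string sourceUser, string sourcePass, CloudFileDirectory customDirectory, long size)
{
    FtpWebRequest downloadRequest = (FtpWebRequest)WebRequest.Create(sourceURI + fileName);
    downloadRequest.Method = WebRequestMethods.Ftp.DownloadFile;
    downloadRequest.Credentials = new NetworkCredential(sourceUser, sourcePass);

    CloudFile cloudFile = customDirectory.GetFileReference(fileName);

    using (WebResponse response = await downloadRequest.GetResponseAsync())
    using (Stream ftpStream = response.GetResponseStream())
    using (var azureStream = await cloudFile.OpenWriteAsync(size))
    {
        // The copy overlaps network reads and writes instead of blocking a thread.
        await ftpStream.CopyToAsync(azureStream);
    }
}

From the existing synchronous Main it can be called with StreamFileAsync(...).GetAwaiter().GetResult(), or Main itself can be changed to async Task (C# 7.1+) and the call awaited.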
Source: https://stackoverflow.com/questions/47599211/transfer-files-directly-from-ftp-to-azure-file-storage-without-keeping-them-loca