I need to upload files (with different extensions, i.e. .txt, .jpg, .pdf, ...) to a server.
I created a site that accepts HTTP requests and maps a virtual directory to a physical one.
All this works fine for download; now I have to implement the upload.
Here is my code:
private void UploadFile(string uploadFileName, string localFileName)
{
    string boundary = "----------------------------" + DateTime.Now.Ticks.ToString("x");

    HttpWebRequest httpWebRequest2 = (HttpWebRequest)WebRequest.Create(uploadFileName);
    httpWebRequest2.ContentType = "multipart/form-data; boundary=" + boundary;
    httpWebRequest2.Method = "POST";
    httpWebRequest2.KeepAlive = true;
    httpWebRequest2.Credentials = new NetworkCredential("USER", "PASSWORD", "DOMAIN");

    Stream memStream = new System.IO.MemoryStream();
    byte[] boundarybytes = System.Text.Encoding.ASCII.GetBytes("\r\n--" + boundary + "\r\n");
    memStream.Write(boundarybytes, 0, boundarybytes.Length);

    // part headers, terminated by a blank line before the payload
    string headerTemplate = "Content-Disposition: form-data; name=\"{0}\"; filename=\"{1}\"\r\nContent-Type: application/octet-stream\r\n\r\n";
    string header = string.Format(headerTemplate, "uplTheFile", localFileName);
    byte[] headerbytes = System.Text.Encoding.UTF8.GetBytes(header);
    memStream.Write(headerbytes, 0, headerbytes.Length);

    FileStream fileStream = new FileStream(localFileName, FileMode.Open, FileAccess.Read);
    byte[] buffer = new byte[1024];
    int bytesRead = 0;
    while ((bytesRead = fileStream.Read(buffer, 0, buffer.Length)) != 0)
    {
        memStream.Write(buffer, 0, bytesRead);
    }
    fileStream.Close();

    // the closing boundary of a multipart body must end with an extra "--"
    byte[] trailerbytes = System.Text.Encoding.ASCII.GetBytes("\r\n--" + boundary + "--\r\n");
    memStream.Write(trailerbytes, 0, trailerbytes.Length);

    httpWebRequest2.ContentLength = memStream.Length;
    Stream requestStream = httpWebRequest2.GetRequestStream();
    //error returned in length field: "This stream does not support seek operations."
    memStream.Position = 0;
    byte[] tempBuffer = new byte[memStream.Length];
    memStream.Read(tempBuffer, 0, tempBuffer.Length);
    memStream.Close();
    requestStream.Write(tempBuffer, 0, tempBuffer.Length);
    requestStream.Close();

    WebResponse webResponse2 = httpWebRequest2.GetResponse();
    //error returned from GetResponse: "The remote server returned an error: (405) Method Not Allowed."
    //I guess because my folder is read-only
    Stream stream2 = webResponse2.GetResponseStream();
    StreamReader reader2 = new StreamReader(stream2);
    MessageBox.Show(reader2.ReadToEnd());
    webResponse2.Close();
    httpWebRequest2 = null;
    webResponse2 = null;
}
At first I got this error: "The remote server returned an error: (405) Method Not Allowed."
So I tried to enable POST by adding a handler mapping to the web site.
Now the folder on the server has a web.config file that looks like this:
<?xml version="1.0" encoding="UTF-8"?>
<configuration>
  <system.webServer>
    <directoryBrowse enabled="true" />
    <handlers>
      <add name="File TXT" path="*.*" verb="*" modules="IsapiModule" scriptProcessor="C:\Windows\System32\inetsrv\asp.dll" resourceType="Unspecified" requireAccess="Script" preCondition="bitness64" />
    </handlers>
  </system.webServer>
</configuration>
This way I no longer get any error, but the application does not upload any file.
How do I solve this problem?
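For what it's worth, the handler mapping above only changes which module handles the request; nothing on the server actually reads the multipart body and saves the file, which would explain why no file appears. A minimal sketch of an ASP.NET .ashx handler that would do the saving (the D:\Uploads target folder is a made-up example; "uplTheFile" matches the form field name used in the client code above):

using System.IO;
using System.Web;

public class UploadHandler : IHttpHandler
{
    public void ProcessRequest(HttpContext context)
    {
        // ASP.NET has already parsed the multipart/form-data body into Request.Files
        HttpPostedFile posted = context.Request.Files["uplTheFile"];
        if (posted != null)
        {
            string savePath = Path.Combine(@"D:\Uploads", Path.GetFileName(posted.FileName));
            posted.SaveAs(savePath);
            context.Response.Write("OK");
        }
    }

    public bool IsReusable
    {
        get { return false; }
    }
}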
A much simpler solution would be to use the System.Net.WebClient class to upload the file.
System.Net.WebClient client = new System.Net.WebClient();
client.UploadFile("http://www.myfileuploadendpoint.com", @"c:\myfiletoupload.txt");
The client also has an UploadFileCompleted event that will call a handler when it's done:
client.UploadFileCompleted += (sender, args) => { /* do whatever you want */ };
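Note that the completion event only fires when the upload is started with the asynchronous UploadFileAsync overload; a minimal sketch (the endpoint URL and file path are placeholders):

System.Net.WebClient client = new System.Net.WebClient();
client.UploadFileCompleted += (sender, args) =>
{
    // args.Error carries any exception, e.g. the 405 discussed in this thread
    Console.WriteLine(args.Error != null ? args.Error.Message : "Upload finished.");
};
client.UploadFileAsync(new Uri("http://www.myfileuploadendpoint.com"), @"c:\myfiletoupload.txt");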
This will save you code and bugs. Good Luck! :)
Neither of the above samples is working for me.
"The remote server returned an error: (405) Method Not Allowed." is the exception caught when trying both of the above code samples.
I'd appreciate it if someone could share their thoughts on these exceptions, please.
rgds,
thiru
Related
public HttpWebResponse PushFileToWistia(byte[] contentFileByteArray, string fileName)
{
    // boundary was not shown in the original snippet; defined here so the sample compiles
    string boundary = "---------------------------" + DateTime.Now.Ticks.ToString("x");

    StringBuilder postDataBuilder = new StringBuilder();
    postDataBuilder.Append("I am appending all the wistia config and setting here");

    byte[] postData = null;
    using (MemoryStream postDataStream = new MemoryStream())
    {
        byte[] postDataBuffer = Encoding.UTF8.GetBytes(postDataBuilder.ToString());
        postDataStream.Write(postDataBuffer, 0, postDataBuffer.Length);
        postDataStream.Write(contentFileByteArray, 0, contentFileByteArray.Length);
        postDataBuffer = Encoding.UTF8.GetBytes("\r\n--" + boundary + "--");
        postDataStream.Write(postDataBuffer, 0, postDataBuffer.Length);
        postData = postDataStream.ToArray();
    }

    ServicePointManager.Expect100Continue = false;

    HttpWebRequest request = (HttpWebRequest)WebRequest.Create(AppConfig.WistiaCustomCourseBucket);
    request.Method = "POST";
    request.Expect = String.Empty;
    request.Headers.Clear();
    request.ContentType = "multipart/form-data; boundary=" + boundary;
    request.ContentLength = postData.Length;

    Stream requestStream = request.GetRequestStream();
    // for files > 100MB this call throws an error: "The request was aborted. The request was canceled."
    requestStream.Write(postData, 0, postData.Length);
    requestStream.Flush();
    requestStream.Close();

    HttpWebResponse response = (HttpWebResponse)request.GetResponse();
    return response;
}
The above code works for mp4 video files smaller than 50MB, but when I try to upload a 100MB file it throws an exception ("The request was aborted."). I need to support file sizes up to 1.5GB, so now I am not sure whether this approach is correct for such a big upload. Any suggestions in the right direction would be helpful... thanks. (I am trying to upload the file to the Wistia server.)
The exception is thrown at this line:
requestStream.Write(postData, 0, postData.Length);
I have tried changing the web.config settings, but it didn't work:
<httpRuntime targetFramework="4.5" maxRequestLength="2048576" executionTimeout="12000" requestLengthDiskThreshold="1024" />
------Async Call-------
MemoryStream wistiaFileStream = null;
using (MemoryStream postDataStream = new MemoryStream())
{
    postDataStream.Write(contentFileByteArray, 0, contentFileByteArray.Length);
    wistiaFileStream = postDataStream;
    postDataStream.Flush();
    postDataStream.Close();
    // note: postDataStream is closed here, so the ReadAsync calls below
    // operate on a disposed stream
}

Stream requestStream = await request.GetRequestStreamAsync();
await requestStream.WriteAsync(wistiaMetadata, 0, wistiaMetadata.Length);
using (wistiaFileStream)
{
    byte[] wistiaFileBuffer = new byte[500 * 1024];
    int wistiaFileBytesRead = 0;
    while ((wistiaFileBytesRead = await wistiaFileStream.ReadAsync(wistiaFileBuffer, 0, wistiaFileBuffer.Length)) != 0)
    {
        await requestStream.WriteAsync(wistiaFileBuffer, 0, wistiaFileBytesRead);
        await requestStream.FlushAsync();
    }
    await requestStream.WriteAsync(requestBoundary, 0, requestBoundary.Length);
}
I would suggest moving to async and writing the file directly from the file system to the request, in order to avoid triple-buffering 1.5GB in memory (warning: the code below is not tested).
public async Task<HttpWebResponse> PushFileToWistiaAsync(string contentFilePath)
{
    string boundary = "---------------------------" + DateTime.Now.Ticks.ToString("x");
    // the terminating boundary of a multipart body needs a trailing "--"
    string contentBoundary = "\r\n--" + boundary + "--\r\n";

    StringBuilder wistiaMetadataBuilder = new StringBuilder();
    wistiaMetadataBuilder.Append("--" + boundary + "\r\n");
    // Append all the wistia config and setting here

    byte[] wistiaMetadata = Encoding.UTF8.GetBytes(wistiaMetadataBuilder.ToString());
    byte[] requestBoundary = Encoding.UTF8.GetBytes(contentBoundary);

    ServicePointManager.Expect100Continue = false;

    HttpWebRequest request = (HttpWebRequest)WebRequest.Create(AppConfig.WistiaCustomCourseBucket);
    request.Method = "POST";
    request.Headers.Clear();
    request.Expect = String.Empty;
    request.ContentType = "multipart/form-data; boundary=" + boundary;

    Stream requestStream = await request.GetRequestStreamAsync();
    await requestStream.WriteAsync(wistiaMetadata, 0, wistiaMetadata.Length);
    using (FileStream wistiaFileStream = new FileStream(contentFilePath, FileMode.Open, FileAccess.Read))
    {
        byte[] wistiaFileBuffer = new byte[500 * 1024];
        int wistiaFileBytesRead = 0;
        while ((wistiaFileBytesRead = await wistiaFileStream.ReadAsync(wistiaFileBuffer, 0, wistiaFileBuffer.Length)) != 0)
        {
            await requestStream.WriteAsync(wistiaFileBuffer, 0, wistiaFileBytesRead);
            await requestStream.FlushAsync();
        }
    }
    await requestStream.WriteAsync(requestBoundary, 0, requestBoundary.Length);

    return (HttpWebResponse)(await request.GetResponseAsync());
}
You should play with buffer sizes, amount of data you read at once and request.SendChunked to achieve reasonable performance.
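For illustration, an untested sketch of those two switches; both must be set before the request stream is opened, and the server has to accept chunked transfer encoding:

request.SendChunked = true;                 // stream the body without a Content-Length header
request.AllowWriteStreamBuffering = false;  // don't buffer the whole body in client memory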
Here is another approach (not asynchronous, so possibly worse scalability) which writes directly from the buffer to the request:
public HttpWebResponse PushFileToWistia(byte[] contentFileByteArray)
{
    string boundary = "---------------------------" + DateTime.Now.Ticks.ToString("x");
    // the terminating boundary of a multipart body needs a trailing "--"
    string contentBoundary = "\r\n--" + boundary + "--\r\n";

    StringBuilder wistiaMetadataBuilder = new StringBuilder();
    wistiaMetadataBuilder.Append("--" + boundary + "\r\n");
    // Append all the wistia config and setting here

    byte[] wistiaMetadata = Encoding.UTF8.GetBytes(wistiaMetadataBuilder.ToString());
    byte[] requestBoundary = Encoding.UTF8.GetBytes(contentBoundary);

    ServicePointManager.Expect100Continue = false;

    HttpWebRequest request = (HttpWebRequest)WebRequest.Create(AppConfig.WistiaCustomCourseBucket);
    request.Method = "POST";
    request.Headers.Clear();
    request.Expect = String.Empty;
    request.ContentType = "multipart/form-data; boundary=" + boundary;
    request.ContentLength = wistiaMetadata.Length + contentFileByteArray.Length + requestBoundary.Length;
    // You can play with SendChunked and AllowWriteStreamBuffering to control the size of chunks you send and performance
    //request.SendChunked = true;
    //request.AllowWriteStreamBuffering = false;

    int contentFileChunkSize = 500 * 1024;
    int contentFileBytesRead = 0;

    Stream requestStream = request.GetRequestStream();
    requestStream.Write(wistiaMetadata, 0, wistiaMetadata.Length);
    while (contentFileBytesRead < contentFileByteArray.Length)
    {
        if ((contentFileBytesRead + contentFileChunkSize) > contentFileByteArray.Length)
        {
            contentFileChunkSize = contentFileByteArray.Length - contentFileBytesRead;
        }
        requestStream.Write(contentFileByteArray, contentFileBytesRead, contentFileChunkSize);
        requestStream.Flush();
        contentFileBytesRead += contentFileChunkSize;
    }
    requestStream.Write(requestBoundary, 0, requestBoundary.Length);
    requestStream.Close();

    // You might need to play with request.Timeout here
    return (HttpWebResponse)request.GetResponse();
}
Also, if you are doing this in a web application and you want to use the asynchronous approach, you need to "async/await" all the way up (so an async action in an async controller, etc.).
In general I would discourage doing this as part of request handling in a web application (the total time observed from the user's perspective would be the sum of uploading to your app and then to Wistia, which might be much more than the client timeout allows). In such cases it is usually better to save the file and schedule some other "background task" to do the upload job.
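A rough sketch of that pattern on ASP.NET 4.5.2 or later, assuming an MVC controller and the PushFileToWistiaAsync method above (QueueBackgroundWorkItem keeps the work alive after the response has been sent, although a durable queue would survive app-pool recycles better):

[HttpPost]
public ActionResult Upload(HttpPostedFileBase file)
{
    // save the upload locally first and return to the user right away
    string tempPath = Path.Combine(Server.MapPath("~/App_Data"), Path.GetFileName(file.FileName));
    file.SaveAs(tempPath);

    // push to Wistia outside the request, so the user doesn't wait for both uploads
    System.Web.Hosting.HostingEnvironment.QueueBackgroundWorkItem(
        async cancellationToken => await PushFileToWistiaAsync(tempPath));

    return Json(new { queued = true });
}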
Here I want to point out some code which I found here: Using plupload with MVC3. Its intention is to upload a single file, but my requirement is a bit different: I need to upload a few large files, say 3 files, where each file could be 2GB in size.
[HttpPost]
public ActionResult Upload(int? chunk, string name)
{
    var fileUpload = Request.Files[0];
    var uploadPath = Server.MapPath("~/App_Data");
    chunk = chunk ?? 0;
    using (var fs = new FileStream(Path.Combine(uploadPath, name), chunk == 0 ? FileMode.Create : FileMode.Append))
    {
        // CopyTo avoids the partial-read bug of a single Stream.Read call
        fileUpload.InputStream.CopyTo(fs);
    }
    return Json(new { message = "chunk uploaded", name = name });
}
$('#uploader').pluploadQueue({
    runtimes: 'html5,flash',
    url: '@Url.Action("Upload")',
    max_file_size: '5mb',
    chunk_size: '1mb',
    unique_names: true,
    multiple_queues: false,
    preinit: function (uploader) {
        uploader.bind('FileUploaded', function (up, file, data) {
            // here file will contain interesting properties like
            // id, loaded, name, percent, size, status, target_name, ...
            // data.response will contain the server response
        });
    }
});
I just wonder whether anyone can tell me what else I need to add to the server-side and client-side code above to enable uploading multiple large files. Thanks.
You might well need to add an entry to your web.config file to allow for the large file size (2097152 KB = 2 GB). You can adjust the timeout, which is in seconds, accordingly:
<system.web>
  <httpRuntime maxRequestLength="2097152" executionTimeout="3600" />
</system.web>
You can also set the request limit (which is in bytes) to 2GB:
<system.webServer>
  <security>
    <requestFiltering>
      <requestLimits maxAllowedContentLength="2147483648" />
    </requestFiltering>
  </security>
</system.webServer>
In my MVC application, I am using a dropzone control to allow users to upload a file from their local system to the SQL database.
if (Request.Files.Count > 0)
{
    var name = Request.Files[0].FileName;
    var size = Request.Files[0].ContentLength;
    var type = Request.Files[0].ContentType;
    var fileStream = Request.Files[0].InputStream;

    // copy through a MemoryStream instead of a single Read call,
    // which is not guaranteed to fill the buffer
    byte[] documentBytes;
    using (var ms = new MemoryStream())
    {
        fileStream.CopyTo(ms);
        documentBytes = ms.ToArray();
    }

    Documents databaseDocument = new Documents
    {
        FileContent = documentBytes,
        DocumentName = System.IO.Path.GetFileName(name),
        DocumentSize = size,
        DocumentType = type
    };
    bool result = this.updateService.SaveDocument(databaseDocument);
}
"updateService" is actually a reference to the WCF service.
I get an error on the "SaveDocument" call in the code above: "The remote server returned an error: (413) Request Entity Too Large."
I have set uploadReadAheadSize (in applicationHost.config) and maxReceivedMessageSize (in the WCF and web configuration files) as suggested on other forums, but the error is still not resolved for me.
You could use a stream as the parameter to your service operation if you don't want issues transmitting overly large objects.
Service interface:
[ServiceContract]
public interface IStreamedService
{
[OperationContract]
void PrepareUpload(long fileLength, string fileName);
[OperationContract]
void UploadFile(Stream fileStream);
}
Service implementation:
public class StreamedService : IStreamedService
{
private static long lengthOfFile;
private static string nameOfFile;
public void PrepareUpload(long fileLength, string fileName)
{
lengthOfFile = fileLength;
nameOfFile = fileName;
}
public void UploadFile(Stream fileStream)
{
if(lengthOfFile==0 || string.IsNullOrEmpty(nameOfFile))
throw new ArgumentException("Upload must be prepared");
var bytes = new byte[lengthOfFile];
var numberOfBytesToRead = bytes.Length;
var numberOfReadBytes = 0;
while (numberOfBytesToRead > 0)
{
var n = fileStream.Read(bytes, numberOfReadBytes, numberOfBytesToRead);
if (n == 0)
break;
numberOfReadBytes += n;
numberOfBytesToRead -= n;
}
var fsOut = new FileStream(string.Format(#"c:\temp\{0}", nameOfFile), FileMode.Create);
fsOut.Write(bytes, 0, numberOfReadBytes);
fsOut.Close();
}
}
Service config:
<system.serviceModel>
  <services>
    <service name="StreamedService.StreamedService">
      <endpoint address="net.tcp://localhost:60000/StreamedService"
                binding="netTcpBinding" bindingConfiguration="NewBinding0" contract="Contracts.IStreamedService" />
    </service>
  </services>
  <bindings>
    <netTcpBinding>
      <binding name="NewBinding0" transferMode="Streamed" maxReceivedMessageSize="67108864" />
    </netTcpBinding>
  </bindings>
</system.serviceModel>
Client implementation:
var proxy = new ChannelFactory<IStreamedService>("MyEndpoint").CreateChannel();
var fs = new FileStream(@"c:\temp\FileToUpload.zip", FileMode.Open);
proxy.PrepareUpload(fs.Length, "uploadedFile.zip");
proxy.UploadFile(fs);
Client config:
<system.serviceModel>
  <bindings>
    <netTcpBinding>
      <binding name="NewBinding0" transferMode="Streamed" maxReceivedMessageSize="67108864" />
    </netTcpBinding>
  </bindings>
  <client>
    <endpoint address="net.tcp://localhost:60000/StreamedService"
              binding="netTcpBinding" bindingConfiguration="NewBinding0"
              contract="Contracts.IStreamedService" name="MyEndpoint">
    </endpoint>
  </client>
</system.serviceModel>
The above works with basicHttpBinding as well. And you could of course use a MemoryStream on the server side instead of a FileStream, and then deserialize it into some entity that you want to save to a DB.
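For illustration, a sketch of that MemoryStream variant inside UploadFile (the Documents entity and SaveDocument call are borrowed from the question above purely as an example of "some entity"):

using (var ms = new MemoryStream())
{
    fileStream.CopyTo(ms); // buffer the streamed message body
    var document = new Documents
    {
        FileContent = ms.ToArray(),
        DocumentName = nameOfFile
    };
    // persist via your data layer, e.g. updateService.SaveDocument(document);
}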
I'm using the CSOM to upload files to a SharePoint 365 site.
I've logged in successfully with claims-based authentication using the methods found here: http://www.wictorwilen.se/Post/How-to-do-active-authentication-to-Office-365-and-SharePoint-Online.aspx
But using SaveBinaryDirect on the ClientContext fails with a 405, due to cookies being attached to the request too late.
Another method of using CSOM to upload files is similar to the code below, but with SP 365 this limits the file size to about 3 MB.
var newFileFromComputer = new FileCreationInformation
{
    Content = fileContents,
    Url = Path.GetFileName(sourceUrl)
};
Microsoft.SharePoint.Client.File uploadedFile = customerFolder.Files.Add(newFileFromComputer);
context.Load(uploadedFile);
context.ExecuteQuery();
Is there ANY way to do this using CSOM, SP 365, and file sizes up to, say, 100 MB?
Indeed, while trying to upload a file to SharePoint Online whose size exceeds the 250MB file limit, the following exception occurs:
Response received was -1, Microsoft.SharePoint.Client.InvalidClientQueryException: The request message is too big. The server does not allow messages larger than 262144000 bytes.
To circumvent this error, chunked file upload methods have been introduced which support uploading files larger than 250MB. The link provided under References below contains a sample which demonstrates how to utilize them via the SharePoint CSOM API.
Supported versions:
SharePoint Online
SharePoint On-Premise 2016 or above
The following example demonstrates how to utilize chunked file upload methods in SharePoint REST API:
class FileUploader
{
    public static void ChunkedFileUpload(string webUrl, ICredentials credentials, string sourcePath, string targetFolderUrl, int chunkSizeBytes, Action<long, long> chunkUploaded)
    {
        using (var client = new WebClient())
        {
            client.BaseAddress = webUrl;
            client.Credentials = credentials;
            client.Headers.Add("X-FORMS_BASED_AUTH_ACCEPTED", "f");
            var formDigest = RequestFormDigest(webUrl, credentials);
            client.Headers.Add("X-RequestDigest", formDigest);

            // create an empty file first
            var fileName = System.IO.Path.GetFileName(sourcePath);
            var createFileRequestUrl = string.Format("/_api/web/getfolderbyserverrelativeurl('{0}')/files/add(url='{1}',overwrite=true)", targetFolderUrl, fileName);
            client.UploadString(createFileRequestUrl, "POST");

            var targetUrl = System.IO.Path.Combine(targetFolderUrl, fileName);
            var firstChunk = true;
            var uploadId = Guid.NewGuid();
            var offset = 0L;

            using (var inputStream = System.IO.File.OpenRead(sourcePath))
            {
                var buffer = new byte[chunkSizeBytes];
                int bytesRead;
                while ((bytesRead = inputStream.Read(buffer, 0, buffer.Length)) > 0)
                {
                    if (firstChunk)
                    {
                        // note: assumes the file is larger than one chunk;
                        // the first chunk uploads the full buffer
                        var endpointUrl = string.Format("/_api/web/getfilebyserverrelativeurl('{0}')/startupload(uploadId=guid'{1}')", targetUrl, uploadId);
                        client.UploadData(endpointUrl, buffer);
                        firstChunk = false;
                    }
                    else if (inputStream.Position == inputStream.Length)
                    {
                        // last chunk: upload only the bytes actually read
                        var endpointUrl = string.Format("/_api/web/getfilebyserverrelativeurl('{0}')/finishupload(uploadId=guid'{1}',fileOffset={2})", targetUrl, uploadId, offset);
                        var finalBuffer = new byte[bytesRead];
                        Array.Copy(buffer, finalBuffer, finalBuffer.Length);
                        client.UploadData(endpointUrl, finalBuffer);
                    }
                    else
                    {
                        var endpointUrl = string.Format("/_api/web/getfilebyserverrelativeurl('{0}')/continueupload(uploadId=guid'{1}',fileOffset={2})", targetUrl, uploadId, offset);
                        client.UploadData(endpointUrl, buffer);
                    }
                    offset += bytesRead;
                    chunkUploaded(offset, inputStream.Length);
                }
            }
        }
    }

    public static string RequestFormDigest(string webUrl, ICredentials credentials)
    {
        using (var client = new WebClient())
        {
            client.BaseAddress = webUrl;
            client.Credentials = credentials;
            client.Headers.Add("X-FORMS_BASED_AUTH_ACCEPTED", "f");
            client.Headers.Add("Accept", "application/json; odata=verbose");
            var endpointUrl = "/_api/contextinfo";
            var content = client.UploadString(endpointUrl, "POST");
            // JObject comes from Newtonsoft.Json.Linq
            var data = JObject.Parse(content);
            return data["d"]["GetContextWebInformation"]["FormDigestValue"].ToString();
        }
    }
}
Source code: FileUploader.cs
Usage
var userCredentials = GetCredentials(userName, password);
var sourcePath = @"C:\temp\jellyfish-25-mbps-hd-hevc.mkv"; // local file path
var targetFolderUrl = "/Shared Documents"; // library-relative URL
FileUploader.ChunkedFileUpload(webUrl,
    userCredentials,
    sourcePath,
    targetFolderUrl,
    1024 * 1024 * 5, // 5MB
    (offset, size) =>
    {
        Console.WriteLine("{0:P} completed", (offset / (float)size));
    });
References
Always use File Chunking to Upload Files > 250 MB to SharePoint Online
Well, I haven't found a way to do it with the CSOM, and that is truly infuriating.
A workaround was posted by SEvans in the comments on http://www.wictorwilen.se/Post/How-to-do-active-authentication-to-Office-365-and-SharePoint-Online.aspx .
Basically, just do an HTTP PUT and attach the cookie collection from the claims-based authentication. SEvans' posted workaround is below:
Great piece of code Wictor. As others have noted, SaveBinaryDirect does not work correctly, as the FedAuth cookies never get attached to the HTTP PUT request that the method generates.
Here's my workaround:
// "url" is the full destination path (including the filename, i.e. https://mysite.sharepoint.com/Documents/Test.txt)
// "cookie" is the CookieContainer generated from Wictor's code
// "data" is the byte array containing the file's contents (use a FileStream to load it)

System.Net.ServicePointManager.Expect100Continue = false;
HttpWebRequest request = HttpWebRequest.Create(url) as HttpWebRequest;
request.Method = "PUT";
request.Accept = "*/*";
request.ContentType = "multipart/form-data; charset=utf-8";
request.CookieContainer = cookie;
request.AllowAutoRedirect = false;
request.UserAgent = "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; WOW64; Trident/5.0)";
request.Headers.Add("Accept-Language", "en-us");
request.Headers.Add("Translate", "F");
request.Headers.Add("Cache-Control", "no-cache");
request.ContentLength = data.Length;

using (Stream req = request.GetRequestStream())
{
    req.Write(data, 0, data.Length);
}

HttpWebResponse response = (HttpWebResponse)request.GetResponse();
Stream res = response.GetResponseStream();
StreamReader rdr = new StreamReader(res);
string rawResponse = rdr.ReadToEnd();
response.Close();
rdr.Close();
I need to upload a posted file to an FTP location in my controller.
Here is what I have now.
public ActionResult Upload(HttpPostedFileBase file)
{
    string fileName = System.IO.Path.GetFileName(file.FileName);
    FtpWebRequest request = (FtpWebRequest)WebRequest.Create("ftp://10.10.0.3" + "/" + fileName);
    request.Method = WebRequestMethods.Ftp.UploadFile;
    request.Credentials = new NetworkCredential("username", "password");

    StreamReader streamReader = new StreamReader(file.InputStream);
    byte[] fileContents = Encoding.UTF8.GetBytes(streamReader.ReadToEnd());
    streamReader.Close();

    request.ContentLength = fileContents.Length;
    Stream requestStream = request.GetRequestStream();
    requestStream.Write(fileContents, 0, fileContents.Length);
    requestStream.Close();

    FtpWebResponse response = (FtpWebResponse)request.GetResponse();
    .....
}
The file is being uploaded and has the correct number of pages; however, there is no text in the new file. (These are PDFs. I will do validation on the type later, I'm just trying to get it to work now.)
Thanks!
You are reading PDF files as if they were text files. Instead, try this:
const int BUFFER_SIZE = 4096; // not defined in the original snippet; any reasonable size works

Stream sourceStream = file.InputStream;
request.ContentLength = sourceStream.Length; // set before opening the request stream
Stream requestStream = request.GetRequestStream();

byte[] buffer = new byte[BUFFER_SIZE];
int bytesRead = sourceStream.Read(buffer, 0, BUFFER_SIZE);
while (bytesRead > 0)
{
    requestStream.Write(buffer, 0, bytesRead);
    bytesRead = sourceStream.Read(buffer, 0, BUFFER_SIZE);
}

sourceStream.Close();
requestStream.Close();

FtpWebResponse response = (FtpWebResponse)request.GetResponse();
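On .NET 4 and later the same copy can be written more compactly with Stream.CopyTo, and it does no harm to state binary mode explicitly (UseBinary defaults to true on FtpWebRequest):

request.UseBinary = true; // binary transfer, so the PDF bytes are not mangled
using (Stream sourceStream = file.InputStream)
using (Stream requestStream = request.GetRequestStream())
{
    sourceStream.CopyTo(requestStream);
}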