API return image - C#

I am trying to fetch a byte array from the database and return it as an image, like this:
public IActionResult Get(string PictureCode)
{
SqlParameter[] parameters = { new SqlParameter("@PictureCode", PictureCode) };
var PD = db.Picture.FromSql("SIP_API_MONDIA_ItemPicture_sel @PictureCode", parameters).ToList();
var contents = new MemoryStream(PD[0].PictureData);
return File(contents, "image/png");
}
Edit:
1) PD is of type PictureModel
var PD = db.Picture.FromSql("SIP_API_MONDIA_ItemPicture_sel @PictureCode", parameters).ToList();
Picture Model:
public class PictureModel
{
[Key]
public byte[] PictureData { get; set; }
}
Error:
Edit2: Stored procedure sample:
GO
ALTER Procedure [dbo].[SIP_API_MONDIA_ItemPicture_sel]
(
@PictureCode int = null
)
As
Begin
If @PictureCode is not null
Begin
Select t1.PictureData From T_Picture t1 Where t1.PictureCode = @PictureCode
End
return 0
End
where PictureData is of type varbinary(max) in T_Picture.
Edit3: The middleware where the error (see the screenshot) happens:
public void Configure(IApplicationBuilder app, IHostingEnvironment env)
{
if (env.IsDevelopment())
{
app.UseDeveloperExceptionPage();
}
// Add middleware before MVC
app.Use(async (ctx, next) =>
{
// Capture the original response body stream
var responseStream = ctx.Response.Body;
// Replace it with our own, so that we can read it
using (var bodyStream = new MemoryStream())
{
ctx.Response.Body = bodyStream;
//if (ctx.Request.Path.Value.Contains("customers") && !ctx.Request.QueryString.Value.Contains("?$expand"))
//{
// ctx.Response.Redirect($"" + ctx.Request.Path.Value + "?$expand=FreeFields,Contact,Address");
//}
// Run ASP.NET MVC (ie. get the results back from your code)
await next();
// Put the original response body stream back
ctx.Response.Body = responseStream;
// Read the one that we captured
bodyStream.Seek(0, SeekOrigin.Begin);
var responseBody = await new StreamReader(bodyStream).ReadToEndAsync();
// If it's ODATA & JSON & 200 (success), replace the "value" with "results"
if (ctx.Response.ContentType.Contains("application/json") && ctx.Response.ContentType.Contains("odata") && ctx.Response.StatusCode == 200)
{
responseBody = responseBody.Replace("\"value\"", "\"results\"");
}
// Write back the response body (whether modified or not) to the original response stream
await ctx.Response.WriteAsync(responseBody);
}
});
app.UseMvc(b =>
{
b.Select().Expand().Filter().OrderBy().MaxTop(100).Count();
b.MapODataServiceRoute("odata", "odata", GetEdmModel());
});
}

You can do:
var contents = new MemoryStream(PD[0].PictureData); // since PictureData is a byte array
After that you can put the stream into File() method:
return File(contents, "CorrectMimeTypeHere"); // image/png or image/jpeg
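Putting the answer together with the question's code, a minimal sketch of the full action might look like this. The route attribute, the empty-result guard, and the byte[] overload of File() are illustrative additions; db and PictureModel come from the question, and the SQL parameter uses the usual @ prefix.
// A minimal sketch combining the question's query with the answer above
// (requires: using System.Data.SqlClient; using System.Linq;)
[HttpGet("{PictureCode}")]
public IActionResult Get(string PictureCode)
{
    // SQL Server parameters use the @ prefix
    var parameter = new SqlParameter("@PictureCode", PictureCode);
    var pictures = db.Picture
        .FromSql("SIP_API_MONDIA_ItemPicture_sel @PictureCode", parameter)
        .ToList();

    // Guard against an empty result set before indexing into it
    if (pictures.Count == 0 || pictures[0].PictureData == null)
        return NotFound();

    // File() also accepts the byte array directly, so no MemoryStream is needed
    return File(pictures[0].PictureData, "image/png");
}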

Related

Large File download from SQL via WebApi after custom MultipartFormDataStreamProvider upload

This is a follow-up to a question I asked previously that was closed for being too broad: Previous Question
In that question I explained that I needed to upload a large file (1-3 GB) to the database by storing chunks as individual rows. I did this by overriding the MultipartFormDataStreamProvider.GetStream method. That method returned a custom stream that wrote the buffered chunks to the database.
The problem is that the overridden GetStream method writes the entire request to the database, including the headers. It successfully writes that data while keeping memory usage flat, but when I download the file, all of the header information comes back in addition to the file contents, so the file can't be opened.
Is there a way, in the overridden GetStream method, to write just the contents of the file to the database without writing the headers?
API
[HttpPost]
[Route("file")]
[ValidateMimeMultipartContentFilter]
public Task<HttpResponseMessage> PostFormData()
{
var provider = new CustomMultipartFormDataStreamProvider();
// Read the form data and return an async task.
var task = Request.Content.ReadAsMultipartAsync(provider).ContinueWith<HttpResponseMessage>(t =>
{
if (t.IsFaulted || t.IsCanceled)
{
return Request.CreateErrorResponse(HttpStatusCode.InternalServerError, t.Exception);
}
return Request.CreateResponse(HttpStatusCode.OK);
});
return task;
}
[HttpGet]
[Route("file/{id}")]
public async Task<HttpResponseMessage> GetFile(string id)
{
if (!string.IsNullOrEmpty(id))
{
var result = new HttpResponseMessage()
{
Content = new PushStreamContent(async (outputStream, httpContent, transportContext) =>
{
await WriteDataChunksFromDBToStream(outputStream, httpContent, transportContext, id);
}),
StatusCode = HttpStatusCode.OK
};
result.Content.Headers.ContentType = new MediaTypeHeaderValue("application/zipx");
result.Content.Headers.ContentDisposition = new ContentDispositionHeaderValue("attachment") { FileName = "test response.zipx" };
return result;
}
return new HttpResponseMessage(HttpStatusCode.BadRequest);
}
private async Task WriteDataChunksFromDBToStream(Stream responseStream, HttpContent httpContent, TransportContext transportContext, string fileIdentifier)
{
// PushStreamContent requires the responseStream to be closed
// for signaling it that you have finished writing the response.
using (responseStream)
{
using (var myConn = new SqlConnection(System.Configuration.ConfigurationManager.ConnectionStrings["TestDB"].ConnectionString))
{
await myConn.OpenAsync();
using (var myCmd = new SqlCommand("ReadAttachmentChunks", myConn))
{
myCmd.CommandType = System.Data.CommandType.StoredProcedure;
var fileName = new SqlParameter("@Identifier", fileIdentifier);
myCmd.Parameters.Add(fileName);
// Read data back from db in async call to avoid OutOfMemoryException when sending file back to user
using (var reader = await myCmd.ExecuteReaderAsync(CommandBehavior.SequentialAccess))
{
while (await reader.ReadAsync())
{
if (!(await reader.IsDBNullAsync(3)))
{
using (var data = reader.GetStream(3))
{
// Asynchronously copy the stream from the server to the response stream
await data.CopyToAsync(responseStream);
}
}
}
}
}
}
}// close response stream
}
Custom MultipartFormDataStreamProvider GetStream method implementation
public override Stream GetStream(HttpContent parent, HttpContentHeaders headers)
{
// For form data, Content-Disposition header is a requirement
ContentDispositionHeaderValue contentDisposition = headers.ContentDisposition;
if (contentDisposition != null)
{
// If we have a file name then write contents out to AWS stream. Otherwise just write to MemoryStream
if (!String.IsNullOrEmpty(contentDisposition.FileName))
{
var identifier = Guid.NewGuid().ToString();
var fileName = contentDisposition.FileName;// GetLocalFileName(headers);
if (fileName.Contains("\\"))
{
fileName = fileName.Substring(fileName.LastIndexOf("\\") + 1).Replace("\"", "");
}
// We won't post process files as form data
_isFormData.Add(false);
var stream = new CustomSqlStream();
stream.Filename = fileName;
stream.Identifier = identifier;
stream.ContentType = headers.ContentType.MediaType;
stream.Description = (_formData.AllKeys.Count() > 0 && _formData["description"] != null) ? _formData["description"] : "";
return stream;
//return new CustomSqlStream(contentDisposition.Name);
}
// We will post process this as form data
_isFormData.Add(true);
// If no filename parameter was found in the Content-Disposition header then return a memory stream.
return new MemoryStream();
}
throw new InvalidOperationException("Did not find required 'Content-Disposition' header field in MIME multipart body part.");
}
The Write method of Stream, as implemented by CustomSqlStream:
public override void Write(byte[] buffer, int offset, int count)
{
//write buffer to database
using (var myConn = new SqlConnection(System.Configuration.ConfigurationManager.ConnectionStrings["TestDB"].ConnectionString)) {
using (var myCmd = new SqlCommand("WriteAttachmentChunk", myConn)) {
myCmd.CommandType = System.Data.CommandType.StoredProcedure;
var pContent = new SqlParameter("@Content", buffer);
myCmd.Parameters.Add(pContent);
myConn.Open();
myCmd.ExecuteNonQuery();
if (myConn.State == System.Data.ConnectionState.Open)
{
myConn.Close();
}
}
}
((ManualResetEvent)_dataAddedEvent).Set();
}
The "ReadAttachmentChunks" stored procedure gets the rows respective to the file from the db ordered by the time they are inserted into the database. So, the way the code works is it pulls those chunks back and then async writes it back to the PushStreamContent to go back to the user.
So my question is:
Is there a way to write ONLY the content of the file being uploaded as opposed to the headers in addition to the content?
Any help would be greatly appreciated. Thank you.
I finally figured it out. I over-complicated the write process, which caused most of the struggle. Here is my solution to the initial issue:
To keep .NET from buffering the file in memory (so that you can handle large file uploads), you first need to override WebHostBufferPolicySelector so that it doesn't buffer the input stream for your controller, and then replace the buffer policy selector.
public class NoBufferPolicySelector : WebHostBufferPolicySelector
{
public override bool UseBufferedInputStream(object hostContext)
{
var context = hostContext as HttpContextBase;
if (context != null)
{
if (context.Request.RequestContext.RouteData.Values["controller"] != null)
{
if (string.Equals(context.Request.RequestContext.RouteData.Values["controller"].ToString(), "upload", StringComparison.InvariantCultureIgnoreCase))
return false;
}
}
return true;
}
public override bool UseBufferedOutputStream(HttpResponseMessage response)
{
return base.UseBufferedOutputStream(response);
}
}
Then, to replace the buffer policy selector:
GlobalConfiguration.Configuration.Services.Replace(typeof(IHostBufferPolicySelector), new NoBufferPolicySelector());
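That registration normally runs once at application startup. A minimal sketch, assuming a standard Global.asax-based Web API 2 host (WebApiApplication and WebApiConfig are the usual project-template names, not something from the original answer):
using System.Web.Http;
using System.Web.Http.Hosting;

public class WebApiApplication : System.Web.HttpApplication
{
    protected void Application_Start()
    {
        GlobalConfiguration.Configure(WebApiConfig.Register);

        // Swap in the custom policy so requests to the upload controller are
        // streamed rather than buffered in memory
        GlobalConfiguration.Configuration.Services.Replace(
            typeof(IHostBufferPolicySelector),
            new NoBufferPolicySelector());
    }
}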
Then to avoid the default behavior of having the file stream written to disk, you need to provide a stream provider that will write to the database instead. To do this you inherit MultipartStreamProvider and override the GetStream method to return the stream that will write to your database.
public override Stream GetStream(HttpContent parent, HttpContentHeaders headers)
{
// For form data, Content-Disposition header is a requirement
ContentDispositionHeaderValue contentDisposition = headers.ContentDisposition;
if (contentDisposition != null && !String.IsNullOrEmpty(contentDisposition.FileName))
{
// We won't post process files as form data
_isFormData.Add(false);
//create unique identifier for this file upload
var identifier = Guid.NewGuid();
var fileName = contentDisposition.FileName;
var boundaryObj = parent.Headers.ContentType.Parameters.SingleOrDefault(a => a.Name == "boundary");
var boundary = (boundaryObj != null) ? boundaryObj.Value : "";
if (fileName.Contains("\\"))
{
fileName = fileName.Substring(fileName.LastIndexOf("\\") + 1).Replace("\"", "");
}
//write parent container for the file chunks that are being stored
WriteLargeFileContainer(fileName, identifier, headers.ContentType.MediaType, boundary);
//create an instance of the custom stream that will write the chunks to the database
var stream = new CustomSqlStream();
stream.Filename = fileName;
stream.FullFilename = contentDisposition.FileName.Replace("\"", "");
stream.Identifier = identifier.ToString();
stream.ContentType = headers.ContentType.MediaType;
stream.Boundary = (!string.IsNullOrEmpty(boundary)) ? boundary : "";
return stream;
}
else
{
// We will post process this as form data
_isFormData.Add(true);
// If no filename parameter was found in the Content-Disposition header then return a memory stream.
return new MemoryStream();
}
}
The custom stream you create needs to inherit Stream and override the Write method. This is where I overthought the problem and thought I needed to parse out the boundary headers that were passed via the buffer parameter. But this is actually done for you by leveraging the offset and count parameters.
public override void Write(byte[] buffer, int offset, int count)
{
//no boundary is included in the buffer
byte[] fileData = new byte[count];
Buffer.BlockCopy(buffer, offset, fileData, 0, count);
WriteData(fileData);
}
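WriteData itself isn't shown in the answer. Based on the Write implementation in the question above, a sketch of it might simply forward each chunk to the WriteAttachmentChunk stored procedure. The @Identifier parameter and the "TestDB" connection string name are assumptions carried over from the question's code, not confirmed details.
// Sketch only. Requires: using System.Data.SqlClient;
private void WriteData(byte[] chunk)
{
    var connectionString = System.Configuration.ConfigurationManager
        .ConnectionStrings["TestDB"].ConnectionString;

    using (var myConn = new SqlConnection(connectionString))
    using (var myCmd = new SqlCommand("WriteAttachmentChunk", myConn))
    {
        myCmd.CommandType = System.Data.CommandType.StoredProcedure;
        // Identifier is the property set on CustomSqlStream in GetStream above;
        // passing it to the procedure is an assumption about its parameter list
        myCmd.Parameters.Add(new SqlParameter("@Identifier", Identifier));
        myCmd.Parameters.Add(new SqlParameter("@Content", chunk));
        myConn.Open();
        myCmd.ExecuteNonQuery();
    }
}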
From there, it's just a matter of plugging in the API methods for upload and download.
For upload:
public Task<HttpResponseMessage> PostFormData()
{
var provider = new CustomMultipartLargeFileStreamProvider();
// Read the form data and return an async task.
var task = Request.Content.ReadAsMultipartAsync(provider).ContinueWith<HttpResponseMessage>(t =>
{
if (t.IsFaulted || t.IsCanceled)
{
return Request.CreateErrorResponse(HttpStatusCode.InternalServerError, t.Exception);
}
return Request.CreateResponse(HttpStatusCode.OK);
});
return task;
}
For download, and in order to keep the memory footprint low, I leveraged the PushStreamContent to push the chunks back to the user:
[HttpGet]
[Route("file/{id}")]
public async Task<HttpResponseMessage> GetFile(string id)
{
string mimeType = string.Empty;
string filename = string.Empty;
if (!string.IsNullOrEmpty(id))
{
//get the headers for the file being sent back to the user
using (var myConn = new SqlConnection(System.Configuration.ConfigurationManager.ConnectionStrings["PortalBetaConnectionString"].ConnectionString))
{
using (var myCmd = new SqlCommand("ReadLargeFileInfo", myConn))
{
myCmd.CommandType = System.Data.CommandType.StoredProcedure;
var pIdentifier = new SqlParameter("@Identifier", id);
myCmd.Parameters.Add(pIdentifier);
myConn.Open();
var dataReader = myCmd.ExecuteReader();
if (dataReader.HasRows)
{
while (dataReader.Read())
{
mimeType = dataReader.GetString(0);
filename = dataReader.GetString(1);
}
}
}
}
var result = new HttpResponseMessage()
{
Content = new PushStreamContent(async (outputStream, httpContent, transportContext) =>
{
//pull the data back from the db and stream the data back to the user
await WriteDataChunksFromDBToStream(outputStream, httpContent, transportContext, id);
}),
StatusCode = HttpStatusCode.OK
};
result.Content.Headers.ContentType = new MediaTypeHeaderValue(mimeType);// "application/octet-stream");
result.Content.Headers.ContentDisposition = new ContentDispositionHeaderValue("attachment") { FileName = filename };
return result;
}
return new HttpResponseMessage(HttpStatusCode.BadRequest);
}
private async Task WriteDataChunksFromDBToStream(Stream responseStream, HttpContent httpContent, TransportContext transportContext, string fileIdentifier)
{
// PushStreamContent requires the responseStream to be closed
// for signaling it that you have finished writing the response.
using (responseStream)
{
using (var myConn = new SqlConnection(System.Configuration.ConfigurationManager.ConnectionStrings["PortalBetaConnectionString"].ConnectionString))
{
await myConn.OpenAsync();
//stored proc to pull the data back from the db
using (var myCmd = new SqlCommand("ReadAttachmentChunks", myConn))
{
myCmd.CommandType = System.Data.CommandType.StoredProcedure;
var fileName = new SqlParameter("@Identifier", fileIdentifier);
myCmd.Parameters.Add(fileName);
// The reader needs to be executed with the SequentialAccess behavior to enable network streaming
// Otherwise ReadAsync will buffer the entire BLOB into memory which can cause scalability issues or even OutOfMemoryExceptions
using (var reader = await myCmd.ExecuteReaderAsync(CommandBehavior.SequentialAccess))
{
while (await reader.ReadAsync())
{
//confirm the column that has the binary data of the file returned is not null
if (!(await reader.IsDBNullAsync(0)))
{
//read the binary data of the file into a stream
using (var data = reader.GetStream(0))
{
// Asynchronously copy the stream from the server to the response stream
await data.CopyToAsync(responseStream);
await data.FlushAsync();
}
}
}
}
}
}
}// close response stream
}
Ugh. This is nasty. With the upload, you have to make sure to:
- separate the headers from the content portion, following the requirements of the HTTP RFCs;
- allow for chunked transfers;
- remember that the content portion (unless you are transmitting text) will be binary data;
- allow for transfers that are compressed, i.e. GZIP or DEFLATE;
- maybe, just maybe, take the text encoding into account (ASCII, Unicode, UTF-8, etc.).
You can't really ensure that you're persisting the right information to the database without looking at all of these. For the latter items, all of the metadata about what to do is somewhere in the headers, so they are not just throwaway bytes; a sketch of such a check follows below.
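As a hedged illustration of those last points, a small guard such as the following could be called at the top of the overridden GetStream before any bytes are persisted. The helper name, its return value, and the decision to reject compressed parts outright are illustrative assumptions, not part of the original answer.
// Sketch only. Requires: using System; using System.Linq; using System.Net.Http.Headers;
// Returns the effective text charset (null for binary parts) and rejects
// Content-Encodings that the persistence code does not handle.
private static string ValidatePartHeaders(HttpContentHeaders headers)
{
    // Compressed parts (gzip/deflate) would need decompressing before storage
    var contentEncoding = headers.ContentEncoding.FirstOrDefault();
    if (!string.IsNullOrEmpty(contentEncoding))
    {
        throw new NotSupportedException(
            "Content-Encoding '" + contentEncoding + "' is not handled by this provider.");
    }

    var contentType = headers.ContentType;
    if (contentType != null && contentType.MediaType != null &&
        contentType.MediaType.StartsWith("text/", StringComparison.OrdinalIgnoreCase))
    {
        // The charset matters if the stored bytes are ever decoded back into a string
        return string.IsNullOrEmpty(contentType.CharSet) ? "utf-8" : contentType.CharSet;
    }

    return null; // binary content; store the bytes as-is
}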

Downloading Blob, All Files are corrupt

I am trying to download a file that I have been able to save. I will not post all of the code, but I will post the relevant code.
First, let me explain what is happening. I have an Angular 7 application with a C# Web API backend, and I'm storing the files in the database using Entity Framework.
//I save file to cache here
public async Task<string> UploadTempDocument()
{
if(!Request.Content.IsMimeMultipartContent())
throw new HttpResponseException(HttpStatusCode.UnsupportedMediaType);
var provider = new MultipartMemoryStreamProvider();
await Request.Content.ReadAsMultipartAsync(provider);
var key = string.Empty;
foreach (var file in provider.Contents)
{
var buffer = await file.ReadAsByteArrayAsync();
key = Guid.NewGuid().ToString();
_cache.AddCacheItem(key, buffer);
}
return key;
}
// Get File From Cache here and save to database
public override IHttpActionResult Post(Document entity)
{
var bytes = _cache.GetItemFromCache<byte[]>(entity.TempUploadKey);
entity.Content = bytes;
return base.Post(entity);
}
//Download document here
[HttpGet, Route("api/Documents/DownloadDocument/{id}")]
public async Task<IHttpActionResult> DownloadDocument(int id)
{
byte[] documentBytes = (await Repository.GetAsync(id)).Content;
return Ok(documentBytes);
}
//download the document here
download(documentId: number): void {
const options = { responseType: Blob }
//get the document object without bytes here
this.get(`api/documents/${documentId}`).toPromise().then(result=>{
const document:Document = result;
//download the bytes here
this.httpClient
.get<Blob>(`api/documents/DownloadDocument/${documentId}`)
.subscribe((resultBlob:Blob)=>{
let applicationType:string = `application/${document.fileExtension}`;
let blob = new Blob([resultBlob], { type: applicationType } );
saveAs(blob, document.fileName);
});
});
}
So my question: what am I doing wrong here? The file saves and downloads, but it always reports being corrupt or fails to load when I open it.

File transfer between AWS EC2 MVC app and Lambda Serverless Web API app results in corrupted data

I have an MVC application which I am hosting on a Windows Server 2016 AWS EC2 instance. This application is an admin tool. It uses a Web API application that is hosted as an AWS Lambda serverless app (https://docs.aws.amazon.com/toolkit-for-visual-studio/latest/user-guide/lambda-build-test-severless-app.html).
One area of my MVC app allows users to upload images using a form file input. The image is posted back to the MVC controller and sent off to an API utility, which sends the file to the API. The API then resizes the image (using Magick.NET), saves it to an S3 bucket, and stores the resulting URL in a MySQL database.
This all works perfectly when running locally on my machine. The problem is when I try to upload an image on the live website. The result is that when the image data is loaded into a MagickImage, I get the following error:
ImageMagick.MagickCorruptImageErrorException: Not a JPEG file: starts with 0xef 0xbf `' @ error/jpeg.c/JPEGErrorHandler/332\n
I added in some code to log the first 20 bytes of the data (which is a byte array) both in the MVC app (before the file is posted to the API) and in the API once the file was received. I discovered that the values I received were completely different, as shown below:
MVC: FF-D8-FF-E0-00-10-4A-46-49-46-00-01-01-01-01-2C-01-2C-00-00
API: EF-BF-BD-EF-BF-BD-EF-BF-BD-EF-BF-BD-00-10-4A-46-49-46-00-01
I then did the same logging when running locally and saw that the values output were the same:
MVC: FF-D8-FF-E0-00-10-4A-46-49-46-00-01-01-01-01-2C-01-2C-00-00
API: FF-D8-FF-E0-00-10-4A-46-49-46-00-01-01-01-01-2C-01-2C-00-00
Is there some sort of environment setting that I need to set/change which could be causing this strange behaviour?
Below are the different sections of code that are relevant, in order of occurrence.
MVC controller:
public async Task<IActionResult> AddImage(ImageFormViewModel viewModel)
{
if (!ModelState.IsValid)
{
return RedirectToAction("Details", new { id = viewModel.TourId, errors = string.Join(",", ViewData.ModelState.Values.SelectMany(x => x.Errors.Select(y => y.ErrorMessage))) });
}
var apiResponse = await this.api.PostFile<ApiResponse>($"tours/{viewModel.TourId}/images", viewModel.Image);
if (apiResponse.Success)
{
return RedirectToAction("Details", new { id = viewModel.TourId, message = "Image added successfully!" });
}
else
{
return RedirectToAction("Details", new { id = viewModel.TourId, errors = string.Join(",", apiResponse.Errors) });
}
}
API Utility (In MVC app):
public async Task<TResponse> PostFile<TResponse>(string uri, IFormFile file) where TResponse : ApiResponse
{
var response = default(TResponse);
if (file != null && file.Length > 0)
{
var url = (!string.IsNullOrWhiteSpace(uri) ? new Uri(new Uri(this.baseUrl), uri) : new Uri(this.baseUrl)).ToString();
using (var http = new HttpClient())
{
byte[] data;
using (var stream = new MemoryStream())
{
await file.CopyToAsync(stream);
data = stream.ToArray();
}
this.logger.Information("First bytes (MVC app): " + BitConverter.ToString(data.Take(20).ToArray()));
var content = new MultipartFormDataContent();
content.Add(new ByteArrayContent(data), "file", file.FileName);
var httpResponse = await http.PostAsync(url, content);
response = JsonConvert.DeserializeObject<TResponse>(await httpResponse.Content.ReadAsStringAsync());
}
}
return response;
}
API controller:
[HttpPost]
public async Task<IActionResult> Post([FromRoute]string tourId)
{
var response = new ApiResponse();
if (Request.HasFormContentType)
{
var form = Request.Form;
foreach (var formFile in form.Files)
{
using (var stream = new MemoryStream())
{
await formFile.CopyToAsync(stream);
var result = await this.tourManagementService.AddImage(HttpUtility.UrlDecode(tourId), Path.GetFileNameWithoutExtension(formFile.FileName), stream.ToArray());
if (!result.Success)
{
...
}
}
}
}
return Ok(response);
}
Service method to save image etc:
public async Task<AddImageResult> AddImage(string tourId, string imageName, byte[] imageData)
{
this.logger.Information("First bytes (API): " + BitConverter.ToString(imageData.Take(20).ToArray()));
...
}
Code where Magick.NET is used and exception is thrown:
private byte[] resizeImage(byte[] imageData, int width, int height)
{
using (var image = new MagickImage(imageData, new MagickReadSettings { Format = MagickFormat.Jpeg }))
{
...
}
}
The problem turned out to be that my AWS API Gateway wasn't accepting binary data. By default, API Gateway treats the message body as JSON as explained here - https://docs.aws.amazon.com/apigateway/latest/developerguide/api-gateway-payload-encodings.html
By default, API Gateway treats the message body as a text payload and applies any preconfigured mapping template to transform the JSON string.
I believe this is the source of the corruption. To remedy it, I had to add "image/jpeg" as an accepted binary media type in API Gateway.
I then adjusted my code to just deal with binary data (and scrapped the form content stuff):
MVC side:
public async Task<TResponse> PostFile<TResponse>(string uri, IFormFile file) where TResponse : ApiResponse
{
var response = default(TResponse);
if (file != null && file.Length > 0)
{
var url = (!string.IsNullOrWhiteSpace(uri) ? new Uri(new Uri(this.baseUrl), uri) : new Uri(this.baseUrl)).ToString();
using (var http = new HttpClient())
{
var content = new StreamContent(file.OpenReadStream());
content.Headers.ContentType = new MediaTypeHeaderValue(file.ContentType);
var httpResponse = await http.PostAsync(url, content);
response = JsonConvert.DeserializeObject<TResponse>(await httpResponse.Content.ReadAsStringAsync());
}
}
return response;
}
API side:
[HttpPost]
public async Task<IActionResult> Post([FromRoute]string tourId)
{
var response = new ApiResponse();
if (Request.ContentType.Equals("image/jpeg"))
{
using (var stream = new MemoryStream())
{
await Request.Body.CopyToAsync(stream);
...
}
}
else
{
...
}
return Ok(response);
}

Modify content of requested file in ASP.NET core 2.0

I am trying to change the body of specific requested files using the code below. The index.html file is correctly sent to the client, but the other files included in the head section are empty on the client's side (the files are shown in the Sources tab in Chrome). Kestrel does not report any errors. Could you explain why?
public static Func<HttpContext, Func<Task>, Task> Rewrite()
{
return async (context, next) =>
{
if (context.Request.Path.Value == "/")
{
var originalStream = context.Response.Body;
using (Stream bufferStream = new FileStream("./wwwroot/index.html", FileMode.Open, FileAccess.Read))
{
context.Response.Body = bufferStream;
bufferStream.Seek(0, SeekOrigin.Begin);
using (var reader = new StreamReader(bufferStream))
{
var response = await reader.ReadToEndAsync();
response = response.Replace("[ddd]", "<script type=\"text/javascript\" src=\"lib/jquery/dist/jquery.js\"></script>");
using (var writer = new StreamWriter(originalStream))
{
await writer.WriteAsync(response);
}
}
}
}
else
{
await next();
}
};
}
Of course, this function is registered in the configuration:
public void Configure(IApplicationBuilder app, IHostingEnvironment env)
{
app.Use(ResponseStreamRewriter.Rewrite());
}

Mock Web API HTTPResponseMessage C# Unit Testing

I want to create a unit test for the method below, which receives uploaded text-file data and parses it. I tried to use Moq and wrote a test, but I am still confused about the concept and need some sample code. I've read many Stack Overflow questions, but they all cover MVC controllers, not Web API.
The method in question:
// Enable both Get and Post so that our jquery call can send data, and get a status
[HttpGet]
[HttpPost]
public HttpResponseMessage Upload()
{
// Get a reference to the file that our jQuery sent. Even if multiple files, they will
// all be their own request and be the 0 index
if (HttpContext.Current.Request.Files.Count>0)
{
HttpPostedFile file = HttpContext.Current.Request.Files[0];
if (file != null && file.ContentLength > 0)
{
try
{
var extension = Path.GetExtension(file.FileName);
if (!IsFileFormatSupported(extension))
{
var objectSerialized = SerializeData(GetError( GlobalResources.NotSupportedFileExtension));
return BadResponse(objectSerialized);
}
var path = SaveFileGetPath(file);
var result = GetPaySlips(path);
var SerializedData = SerializeData(result);
return OkResponse(SerializedData);
}
catch (System.Exception exception)
{
var SerializedData = SerializeData(GetError( GlobalResources.CouldNotReadFile + " " + exception.Message));
return BadResponse(SerializedData);
}
}
else
{
var SerializedData = SerializeData(GetError(file.FileName + " " + GlobalResources.FileisCorrupt));
return BadResponse(SerializedData);
}
}
else
{
var SerializedData = SerializeData(GetError( GlobalResources.FileisCorrupt));
return BadResponse(SerializedData);
}
}
The code I have so far:
var FileUploadCtrl = new UploadController();
Mock<HttpRequestMessage> cc = new Mock<HttpRequestMessage>();
UTF8Encoding enc = new UTF8Encoding();
// Mock<HttpPostedFileBase> file1 = new Mock<HttpPostedFileBase>();
//file1.Expect(f=>f.InputStream).Returns(file1.Object.InputStream);
//cc.Expect(ctx => ctx.Content).Returns(new retur);
// cc.Expect(ctx => ctx.Content).Returns();
var content = new ByteArrayContent( /* bytes in the file */ );
content.Headers.Add("Content-Disposition", "form-data");
var controllerContext = new HttpControllerContext
{
Request = new HttpRequestMessage
{
Content = new MultipartContent { content }
}
};
//file1.Expect(d => d.FileName).Returns("FileTest.csv");
//file1.Expect(d => d.InputStream).Returns(new HttpResponseMessage(HttpStatusCode.OK)));
var config = new HttpConfiguration();
//var request = new HttpRequestMessage(HttpMethod.Post, "http://localhost/api/upload");
var route = config.Routes.MapHttpRoute("DefaultApi", "api/{controller}/{id}");
var routeData = new HttpRouteData(route, new HttpRouteValueDictionary { { "controller", "FileUpload" } });
FileUploadCtrl.ControllerContext = new HttpControllerContext(config, routeData, cc.Object);
var r = FileUploadCtrl.Upload();
Assert.IsInstanceOfType(r, typeof(HttpResponseMessage));
