I have an ASP.NET Core 3.0 Web API endpoint that I have set up to allow me to post large audio files. I have followed the following directions from MS docs to set up the endpoint.
https://learn.microsoft.com/en-us/aspnet/core/mvc/models/file-uploads?view=aspnetcore-3.0#kestrel-maximum-request-body-size
When an audio file is uploaded to the endpoint, it is streamed to an Azure Blob Storage container.
My code works as expected locally.
When I push it to my production server in Azure App Service on Linux, the code does not work and errors with
Unhandled exception in request pipeline: System.Net.Http.HttpRequestException: An error occurred while sending the request. ---> Microsoft.AspNetCore.Server.Kestrel.Core.BadHttpRequestException: Request body too large.
Per advice from the above article, I have incrementally updated my Kestrel configuration with the following:
// Program.cs: raise Kestrel's request limits so large audio uploads are not
// rejected with "Request body too large".
.ConfigureWebHostDefaults(webBuilder =>
{
webBuilder.UseKestrel((ctx, options) =>
{
var config = ctx.Configuration;
// Allow request bodies up to 6,000,000,000 bytes (~5.6 GiB).
options.Limits.MaxRequestBodySize = 6000000000;
// Tolerate slow upload links: at least 100 bytes/sec after a 10s grace period.
options.Limits.MinRequestBodyDataRate =
new MinDataRate(bytesPerSecond: 100,
gracePeriod: TimeSpan.FromSeconds(10));
// Same minimum rate for responses.
options.Limits.MinResponseDataRate =
new MinDataRate(bytesPerSecond: 100,
gracePeriod: TimeSpan.FromSeconds(10));
// Give clients up to 2 minutes to send request headers.
options.Limits.RequestHeadersTimeout =
TimeSpan.FromMinutes(2);
}).UseStartup<Startup>();
Also configured FormOptions to accept files up to 6000000000
// Startup.ConfigureServices: raise the multipart form limit to match
// Kestrel's MaxRequestBodySize so form parsing does not reject the upload.
services.Configure<FormOptions>(options =>
{
options.MultipartBodyLengthLimit = 6000000000;
});
And also set up the API controller with the following attributes, per advice from the article
[HttpPost("audio", Name="UploadAudio")]
// Disables MVC form model binding so the multipart body can be streamed manually.
[DisableFormValueModelBinding]
[GenerateAntiforgeryTokenCookie]
// Per-action request size limits matching the Kestrel/FormOptions settings above.
[RequestSizeLimit(6000000000)]
[RequestFormLimits(MultipartBodyLengthLimit = 6000000000)]
Finally, here is the action itself. This giant block of code is not indicative of how I want the code to be written but I have merged it into one method as part of the debugging exercise.
/// <summary>
/// Receives a multipart/form-data audio upload, streams the file into memory,
/// uploads it (plus metadata) to Azure Blob Storage, and records a media
/// document in MongoDB. Returns the new media id and instructor id on success.
/// </summary>
/// <returns>200 with {MediaId, InstructorId}, or 400 on any processing failure.</returns>
/// <exception cref="ArgumentException">Thrown when the request is not multipart.</exception>
public async Task<IActionResult> Audio()
{
    if (!MultipartRequestHelper.IsMultipartContentType(Request.ContentType))
    {
        throw new ArgumentException("The media file could not be processed.");
    }

    string mediaId = string.Empty;
    string instructorId = string.Empty;

    try
    {
        // --- 1. Stream the multipart body: accumulate form fields and file bytes. ---
        KeyValueAccumulator formAccumulator = new KeyValueAccumulator();
        var streamedFileContent = new byte[0];

        var boundary = MultipartRequestHelper.GetBoundary(
            MediaTypeHeaderValue.Parse(Request.ContentType),
            _defaultFormOptions.MultipartBoundaryLengthLimit);

        var reader = new MultipartReader(boundary, Request.Body);
        var section = await reader.ReadNextSectionAsync();
        while (section != null)
        {
            var hasContentDispositionHeader = ContentDispositionHeaderValue.TryParse(
                section.ContentDisposition, out var contentDisposition);
            if (hasContentDispositionHeader)
            {
                if (MultipartRequestHelper.HasFileContentDisposition(contentDisposition))
                {
                    // Validates extension/size limits and buffers the file into memory.
                    streamedFileContent = await FileHelpers.ProcessStreamedFile(
                        section, contentDisposition, _permittedExtensions, _fileSizeLimit);
                }
                else if (MultipartRequestHelper.HasFormDataContentDisposition(contentDisposition))
                {
                    var key = HeaderUtilities.RemoveQuotes(contentDisposition.Name).Value;
                    var encoding = FileHelpers.GetEncoding(section);
                    if (encoding == null)
                    {
                        return BadRequest($"The request could not be processed: Bad Encoding");
                    }

                    using (var streamReader = new StreamReader(
                        section.Body,
                        encoding,
                        detectEncodingFromByteOrderMarks: true,
                        bufferSize: 1024,
                        leaveOpen: true))
                    {
                        // The value length limit is enforced by MultipartBodyLengthLimit.
                        var value = await streamReader.ReadToEndAsync();
                        if (string.Equals(value, "undefined", StringComparison.OrdinalIgnoreCase))
                        {
                            // JS clients send the literal string "undefined" for missing fields.
                            value = string.Empty;
                        }
                        formAccumulator.Append(key, value);
                        if (formAccumulator.ValueCount > _defaultFormOptions.ValueCountLimit)
                        {
                            return BadRequest($"The request could not be processed: Key Count limit exceeded.");
                        }
                    }
                }
            }
            // Drain any remaining section body that hasn't been consumed and
            // read the headers for the next section.
            section = await reader.ReadNextSectionAsync();
        }

        // --- 2. Extract the expected form fields. ---
        var results = formAccumulator.GetResults();
        instructorId = results["instructorId"];
        string title = results["title"];
        string firstName = results["firstName"];
        string lastName = results["lastName"];
        string durationInMinutes = results["durationInMinutes"];

        string fileExtension = "m4a";
        // Instructor-specific container, e.g. "jsmith-<instructorId>".
        string containerName = $"{firstName[0].ToString().ToLower()}{lastName.ToLower()}-{instructorId}";
        string contentType = "audio/mp4";
        FileType fileType = FileType.audio;
        string authorName = $"{firstName} {lastName}";
        string authorShortName = $"{firstName[0]}{lastName}";
        string description = $"{authorShortName} - {title}";
        // Multiply by 60000L (ms per minute) to avoid int overflow for long durations.
        long duration = Convert.ToInt32(durationInMinutes) * 60000L;
        // Unique blob filename so re-uploads never collide.
        string fileName = $"{firstName[0].ToString().ToLower()}{lastName.ToLower()}-{Guid.NewGuid()}";
        DateTime recordingDate = DateTime.UtcNow;
        DateTime uploadDate = DateTime.UtcNow;
        long blobSize = long.MinValue;

        // --- 3. Upload the file content and its metadata to blob storage. ---
        try
        {
            Dictionary<string, string> metadata = new Dictionary<string, string>();
            metadata.Add("author", authorShortName);
            metadata.Add("title", title); // FIX: key was previously misspelled "tite"
            metadata.Add("description", description);
            metadata.Add("duration", duration.ToString());
            metadata.Add("recordingDate", recordingDate.ToString());
            metadata.Add("uploadDate", uploadDate.ToString());

            var fileNameWExt = $"{fileName}.{fileExtension}";
            var blobContainer = await _cloudStorageService.CreateBlob(containerName, fileNameWExt, "audio");

            try
            {
                using (var fileContent = new MemoryStream(streamedFileContent))
                {
                    await blobContainer.UploadFromStreamAsync(fileContent);
                }
            }
            catch (StorageException e)
            {
                // Both the 403 and the general branch returned the same result;
                // collapsed into a single return.
                return BadRequest(e.Message);
            }

            try
            {
                foreach (var key in metadata.Keys.ToList())
                {
                    blobContainer.Metadata.Add(key, metadata[key]);
                }
                await blobContainer.SetMetadataAsync();
            }
            catch (StorageException e)
            {
                return BadRequest(e.Message);
            }

            blobSize = await StorageUtils.GetBlobSize(blobContainer);
        }
        catch (StorageException e)
        {
            return BadRequest(e.Message);
        }

        // --- 4. Persist the media document to MongoDB. ---
        Media media = Media.Create(string.Empty, instructorId, authorName, fileName, fileType, fileExtension, recordingDate, uploadDate, ContentDetails.Create(title, description, duration, blobSize, 0, new List<string>()), StateDetails.Create(StatusType.STAGED, DateTime.MinValue, DateTime.UtcNow, DateTime.MaxValue), Manifest.Create(new Dictionary<string, string>()));
        if (media != null)
        {
            var mapper = new Mapper(_mapperConfiguration);
            var dao = mapper.Map<ContentDAO>(media);
            try
            {
                await _db.Content.InsertOneAsync(dao);
                // FIX: read dao.Id only after a successful insert. Previously this
                // line ran after the catch block too, clobbering the empty mediaId
                // that signalled a failed insert.
                mediaId = dao.Id.ToString();
            }
            catch (Exception)
            {
                // Insert failed: leave mediaId empty so the guard below returns 400.
                mediaId = string.Empty;
            }
        }
        else
        {
            // Metadata wasn't stored; remove the orphaned blob.
            await _cloudStorageService.DeleteBlob(containerName, fileName, "audio");
            return BadRequest($"An issue occurred during media upload: rolling back storage change");
        }

        if (string.IsNullOrEmpty(mediaId))
        {
            return BadRequest($"Could not add instructor media");
        }
    }
    catch (Exception ex)
    {
        return BadRequest(ex.Message);
    }

    var result = new { MediaId = mediaId, InstructorId = instructorId };
    return Ok(result);
}
I reiterate, this all works great locally. I do not run it in IISExpress, I run it as a console app.
I submit large audio files via my SPA app and Postman and it works perfectly.
I am deploying this code to an Azure App Service on Linux (as a Basic B1).
Since the code works in my local development environment, I am at a loss of what my next steps are. I have refactored this code a few times but I suspect that it's environment related.
I cannot find anywhere that mentions that the level of App Service Plan is the culprit so before I go out spending more money I wanted to see if anyone here had encountered this challenge and could provide advice.
UPDATE: I attempted upgrading to a Production App Service Plan to see if there was an undocumented gate for incoming traffic. Upgrading didn't work either.
Thanks in advance.
-A
Currently, as of 11/2019, there is a limitation with the Azure App Service for Linux. Its CORS functionality is enabled by default and cannot be disabled, AND it has a file size limitation that doesn't appear to be overridden by any of the published Kestrel configurations. The solution is to move the Web API app to an Azure App Service for Windows, where it works as expected.
I am sure there is some way to get around it if you know the magic combination of configurations, server settings, and CLI commands but I need to move on with development.
So I have a problem. I am running an embedded Apache ActiveMQ broker in my application which has a topic. I have one producer which sends small messages to the topic and a consumer that consumes them.
The problem is the application's memory footprint just keeps growing and growing, to the point where it takes up several gigabytes of memory after some days. I did memory profiling with JProfiler and noticed that a lot of instances of type ActiveMQTextMessage are kept in memory.
This is how i set up my broker
// Embedded ActiveMQ broker setup.
BrokerService brokerService = new BrokerService();
// No JMX management endpoint needed for the embedded broker.
brokerService.setUseJmx(false);
brokerService.setUseLocalHostBrokerName(false);
// Transport connector URI comes from per-tenant configuration.
brokerService.addConnector(tenantConfiguration.getConnectionString());
brokerService.setBrokerName(tenantConfiguration.getBrokerComponentIdentifier());
brokerService.setPersistenceAdapter(persistenceAdapterFromConnectionString);
// NOTE(review): a default-constructed SystemUsage applies default memory/store
// limits — confirm these limits are what you intend for this broker.
SystemUsage systemUsage = new SystemUsage();
brokerService.setSystemUsage(systemUsage);
// Per-destination policies (GC of inactive destinations, pending-message limits).
brokerService.setDestinationPolicy(createDestinationPolicyForBrokerService());
And here is how I set up the destination policy:
/**
 * Builds the per-destination policy map for the embedded broker: one wildcard
 * entry for all queues and one for all topics, both capped at 10 pending
 * messages per slow consumer and eligible for inactive-destination GC.
 */
private PolicyMap createDestinationPolicyForBrokerService() {
    // Shared strategy: retain at most 10 pending messages for slow consumers.
    ConstantPendingMessageLimitStrategy pendingLimit = new ConstantPendingMessageLimitStrategy();
    pendingLimit.setLimit(10);

    // Wildcard queue policy: prioritized delivery, GC after one day of inactivity.
    PolicyEntry queues = new PolicyEntry();
    queues.setQueue(">");
    queues.setPrioritizedMessages(true);
    queues.setGcInactiveDestinations(true);
    queues.setInactiveTimoutBeforeGC(86400);
    queues.setPendingMessageLimitStrategy(pendingLimit);

    // Wildcard topic policy: no message cursor cache, GC after 5s of inactivity.
    PolicyEntry topics = new PolicyEntry();
    topics.setTopic(">");
    topics.setGcInactiveDestinations(true);
    topics.setInactiveTimoutBeforeGC(5000);
    topics.setUseCache(false);
    topics.setPendingMessageLimitStrategy(pendingLimit);

    List<PolicyEntry> entries = new ArrayList<>();
    entries.add(queues);
    entries.add(topics);

    PolicyMap map = new PolicyMap();
    map.setPolicyEntries(entries);
    return map;
}
Here is a screenshot of one of the message's outgoing references:
Message
And here is an image when i click on "Show paths to GC root"
Gc root
EDIT:
Here is how I set up the DurableConsumer:
// NMS connection state for the durable topic consumer.
private NMSConnectionFactory _connnectionFactory;
private IConnection _connection;
private ISession _session;
// Opens the connection and, if a topic name is configured, attaches a durable
// consumer whose Listener callback receives published messages.
// NOTE(review): no ClientId is set before Start() — with a durable consumer the
// broker may create a new subscription per restart; confirm a stable ClientId.
public void Start()
{
_connection = _connnectionFactory.CreateConnection(queueUser, queuePwd);
_connection.Start();
// AutoAcknowledge: messages are acked automatically as the listener receives them.
_session = _connection.CreateSession(AcknowledgementMode.AutoAcknowledge);
if (!string.IsNullOrEmpty(TopicName))
{
// Durable consumer survives disconnects; identified by ConsumerName.
_topicConsumer = _session.CreateDurableConsumer(SessionUtil.GetTopic(_session, TopicName), ConsumerName, null, false);
_topicConsumer.Listener += TopicConsumerOnListener;
}
}
And this is how I publish messages to the topic:
/// <summary>
/// Publishes a single text message to the configured topic, opening (and
/// disposing) a dedicated connection, session, and producer for the send.
/// Any failure is logged to the console and swallowed.
/// </summary>
public void PublishMessage(string message)
{
    using (var conn = _connnectionFactory.CreateConnection(user, pwd))
    {
        try
        {
            conn.Start();
            var destination = new ActiveMQTopic(TopicName);
            using (var session = conn.CreateSession())
            {
                using (var producer = session.CreateProducer(destination))
                {
                    var outgoing = producer.CreateTextMessage(message);
                    producer.Send(outgoing);
                }
            }
        }
        catch (Exception exception)
        {
            // Best-effort publish: report the failure and continue.
            Console.WriteLine(exception);
        }
    }
}
Does anyone know why the messages are not being removed after they are consumed?
Thanks
Solved the problem by adding my own client ID to the topic connection:
_connection = _connnectionFactory.CreateConnection(queueUser, queuePwd);
// Assign a stable ClientId BEFORE Start() so the broker reuses the same durable
// subscription across restarts instead of accumulating new consumer rows
// (each of which would retain undelivered messages in memory).
_connection.ClientId = "MY CLIENT ID";
_connection.Start();
That way no new consumer rows are created on restart.
I developed a card reader application using ASP.net MVC5. The card reader used by is HID OMNIKEY 3121. When the card is inserted this application will read the name,gender,dob etc that is encoded in the chip. This is working fine in my local system and I am able to show it in a view.
Then I publish the same to IIS in a server. Then I call the MVC website from my local(client) system. The card reader is connected to the local system but when click the read data it is giving a blank page.
Any configuration issue? Please guide me
Edited
Controller
public class cardController : Controller
{
/// <summary>
/// Reads public data (name, gender, DOB, etc.) from a smart card via the
/// locally attached reader and exposes it to the view through ViewBag.
/// Redirects to card/Nocard when no reader or card can be reached.
/// NOTE: this only works when the reader is attached to the machine running
/// the code — it cannot reach a reader on a remote browser client.
/// </summary>
public ActionResult Index()
{
    try
    {
        ReaderManagement readerMgr = new ReaderManagement();
        readerMgr.EstablishContext(); // establish card-connection API context
        try
        {
            readerMgr.DiscoverReaders(); // discover connected card readers
        }
        catch (Exception)
        {
            return RedirectToAction("Nocard", "card");
        }

        PCSCReader[] readers = readerMgr.Readers;
        PCSCReader selectedReader = readerMgr.SelectReaderByName(readers[0].ReaderName);
        // Other select methods may be called...
        IDCardWrapper.LoadConfiguration();

        // FIX: IsConnected() was previously called twice; call once and reuse.
        bool isCardConnected = selectedReader.IsConnected();
        if (!isCardConnected)
        {
            readerMgr.SelectReaderByName(readers[0].ReaderName);
            try
            {
                selectedReader.Connect(readerMgr.Context);
            }
            catch (Exception)
            {
                return RedirectToAction("Nocard", "card");
            }
        }

        CardInfo cardInfo = selectedReader.GetCardInfo();
        try
        {
            PublicDataFacade publicDataFacade = selectedReader.GetPublicDataFacade();
            CardHolderPublicData publicData = publicDataFacade.ReadPublicData(true, true, true, true, false);
            ViewBag.sex = PublicDataUtils.GetSex(Utils.ByteArrayToUTF8String(publicData.Sex));
            ViewBag.maritalstatus = PublicDataUtils.GetMaritalStatus(Utils.ByteArrayToHex(publicData.MaritalStatus, ""));
            ViewBag.sponsortype = PublicDataUtils.GetSponsorType(Utils.ByteArrayToHex(publicData.SponsorType, ""));
            ViewBag.dob = Utils.ByteArrayToStringDate(publicData.DateOfBirth);
            ViewBag.fullname = PublicDataUtils.RemoveCommas(Utils.ByteArrayToUTF8String(publicData.FullName));
            ViewBag.arabicname = PublicDataUtils.RemoveCommas(Utils.ByteArrayToUTF8String(publicData.ArabicFullName));
        }
        finally
        {
            // FIX: the original 'try' had no catch/finally (compile error) and
            // CloseContext could be skipped on failure; always release the context.
            readerMgr.CloseContext();
        }
    }
    catch (Exception) // (MiddlewareException)
    {
        // Swallowed to render the (empty) view on middleware failure; consider
        // logging the exception so failures are diagnosable.
    }
    return View();
}
View
Simply show the viewbag data assigned.
You can't access a local device (like a smart card reader) from a web site.
You'll have to install a desktop application on the client computer (as stated by @Ashley and @Basic): a Chrome App or Windows App, for instance.
I am adding a TFS WorkItemSaveListener but not getting any Event on saving workitem.
/**
 * Connects to a TFS project collection, registers a work-item save listener,
 * and then sleeps in a loop to keep the process alive so events can arrive.
 * NOTE(review): the SDK's save listener may only fire for saves made through
 * this client — server-side saves likely need a SOAP subscription or plugin.
 */
public static void main(String[] args) {
    // Connecting to the project collection.
    final TFSTeamProjectCollection collection = ConsoleSettings.connectToTFS();
    // Creating the listener instance.
    WorkItemSaveListenerImpl listener = new WorkItemSaveListenerImpl();
    // Registering the listener with the work-item client's event engine.
    collection.getWorkItemClient().getEventEngine().addWorkItemSaveListener(listener);
    // Keep-alive loop: sleep until the thread is interrupted.
    for (;;) {
        try {
            Thread.sleep(10000);
        }
        catch (InterruptedException exception) {
            // FIX: restore the interrupt status and stop the keep-alive loop
            // instead of swallowing the interrupt (which previously ignored
            // shutdown requests and could hot-loop).
            Thread.currentThread().interrupt();
            exception.printStackTrace();
            return;
        }
    }
}
Only really guessing here, as I don't know the Java SDK. But is it possible that the addWorkItemSaveListener event is only triggered for work items changed by that particular work item client?
You may need to setup a soap subscription, or write a server plugin instead.
C# to setup a soap subscription
Sorry it's for the wrong event, but it may be enough to give you an idea.
// Connect and authenticate against the TFS collection URL entered in the UI.
TfsTeamProjectCollection tpc = TfsTeamProjectCollectionFactory.GetTeamProjectCollection(new Uri(txtServerUrl.Text));
tpc.EnsureAuthenticated();
// TFS eventing service used to register SOAP subscriptions.
IEventService eventSrv = tpc.GetService(typeof(IEventService)) as IEventService;
// Deliver events immediately via SOAP to a listener on this machine, port 8001.
DeliveryPreference delPref = new DeliveryPreference();
delPref.Address = "http://" + System.Environment.MachineName + ":8001/CheckInNotify";
delPref.Schedule = DeliverySchedule.Immediate;
delPref.Type = DeliveryType.Soap;
// Subscribe the current DOMAIN\user to CheckInNotify events (empty filter = all).
subscriptionId = eventSrv.SubscribeEvent(System.Environment.UserDomainName + "\\" + System.Environment.UserName, "CheckInNotify", "", delPref);
I have a requirement to run an application through my MVC controller. To get the installation path I used the following link (the answer provided by Fredrik Mörk). It worked, and I was able to run the exe through a process. The problem occurred when I deployed this solution on IIS, where it did not create the process as it was doing in my local dev environment. Can anybody tell me how to create a Windows process from a solution which is hosted on IIS?
/// <summary>
/// Looks up an application's install path from the registry under
/// HKLM\SOFTWARE\Wow6432Node\MyApplication\{fileName}. Returns null when the
/// key or the "InstallPath" value is missing.
/// </summary>
private string GetPathForExe(string fileName)
{
    // FIX: a local cannot carry the 'private' modifier, and the verbatim-string
    // prefix is '@', not '#' (both were compile errors in the original).
    const string keyBase = @"SOFTWARE\Wow6432Node\MyApplication";
    RegistryKey localMachine = Registry.LocalMachine;
    // FIX: 'using' disposes the key and, via '?.', avoids the
    // NullReferenceException the original threw (fileKey.Close() ran even
    // when OpenSubKey returned null).
    using (RegistryKey fileKey = localMachine.OpenSubKey(string.Format(@"{0}\{1}", keyBase, fileName)))
    {
        return (string)fileKey?.GetValue("InstallPath");
    }
}
/// <summary>
/// Starts MyApplication.exe (hidden window) from its registered install
/// directory, unless a process with that name is already running.
/// NOTE(review): under IIS the worker-process identity usually lacks rights to
/// launch interactive processes — this is an environment restriction, not a
/// code bug; confirm the app-pool identity's permissions.
/// </summary>
public void StartMyApplication()
{
    Process[] existing = Process.GetProcessesByName("MyApplication");
    if (existing.Length == 0)
    {
        string appDirectory = GetPathForExe("MyApplication");
        // Fail loudly if the install path is not registered instead of
        // throwing an opaque ArgumentNullException further down.
        if (string.IsNullOrEmpty(appDirectory))
        {
            throw new InvalidOperationException("Install path for MyApplication was not found in the registry.");
        }

        ProcessStartInfo procStartInfo = new ProcessStartInfo("MyApplication.exe");
        // FIX: set the child's working directory instead of calling
        // Directory.SetCurrentDirectory, which mutates the CWD of the whole
        // hosting process (dangerous inside IIS worker processes).
        procStartInfo.WorkingDirectory = appDirectory;
        procStartInfo.WindowStyle = ProcessWindowStyle.Hidden;

        // FIX: dispose the Process component (disposing does not kill the
        // launched child process).
        using (Process proc = new Process())
        {
            proc.StartInfo = procStartInfo;
            proc.Start();
        }
    }
}