Akamai CDN purging of all files on the site - javascript

I need to purge all the images on my website, which is served through the Akamai CDN.
I have written the code for purging a single file; please find it below.
My requirement, however, is to purge all files on the website.
Can anyone suggest how to achieve this?
Thanks
//
// © 2016 WorldVentures. All rights reserved.
//
using Akamai.EdgeGrid.Auth;
using Akamai.Utils;
using CMS;
using CMS.DataEngine;
using CMS.DocumentEngine;
using System;
using System.Collections.Generic;
using System.IO;
using System.Net;
using System.Text;
// Registers the custom module into the system
[assembly: RegisterModule(typeof(AkamaiPurge))]
public class AkamaiPurge : Module
{
// Module class constructor, the system registers the module under the name "CustomInit"
public AkamaiPurge()
: base("CustomInit")
{
}
// Contains initialization code that is executed when the application starts
protected override void OnInit()
{
base.OnInit();
// Assigns custom handlers to events
DocumentEvents.Insert.After += Document_Insert_After;
DocumentEvents.Update.After += Document_Update_After;
DocumentEvents.Delete.After += Document_Delete_After;
}
/// <summary>
/// Will be triggered when any document is added
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private void Document_Insert_After(object sender, DocumentEventArgs e)
{
// PurgeCache();
PurgeCacheAkamai();
}
/// <summary>
/// Will be triggered when any document is Updated
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private void Document_Update_After(object sender, DocumentEventArgs e)
{
//PurgeCache();
PurgeCacheAkamai();
}
/// <summary>
/// Will be triggered when any document is Deleted
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private void Document_Delete_After(object sender, DocumentEventArgs e)
{
//PurgeCache();
PurgeCacheAkamai();
}
/// <summary>
/// Following function will invalidate cache from Akamai server
/// </summary>
public void PurgeCache()
{
string secret = "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX";
string clientToken = "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX";
string accessToken = "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX";
string apiurl = "https://akab-7t5mh54r7lq3a7d7-js5q6mdtx42qimcn.purge.akamaiapis.net/ccu/v3/invalidate/url/production";
List<string> headers = new List<string>();
string httpMethod = "POST";
string data = "{\"hostname\": \"www.qa.worldventures.com\",\"objects\": [\"www.qa.worldventures.com/getmedia/9931b92a-c7f3-4a71-ab27-37e2b13572c0/should-be-here.jpg?width=2541&height=1811&ext=.jpg\"]}";
Stream uploadStream = null;
uploadStream = new MemoryStream(data.ToByteArray());
var uri = new Uri(apiurl);
var request = WebRequest.Create(uri);
request.Method = httpMethod;
var signer = new EdgeGridV1Signer();
var credential = new ClientCredential(clientToken, accessToken, secret);
signer.Sign(request, credential);
using (var result = signer.Execute(request, credential))
{
using (var reader = new StreamReader(result))
{
string value = reader.ReadToEnd();
}
}
}
/// <summary>
/// Following function will invalidate cache from Akamai server
/// </summary>
public void PurgeCacheAkamai()
{
string secret = "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX";
string clientToken = "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX";
string accessToken = "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX";
string apiurl = "https://akab-7t5mh54r7lq3a7d7-js5q6mdtx42qimcn.purge.akamaiapis.net/ccu/v3/invalidate/url/production";
List<string> headers = new List<string>();
string httpMethod = "POST";
string outputfile = null;
string data = "{\"hostname\": \"www.qa.worldventures.com\",\"objects\": [\"www.qa.worldventures.com/getmedia/9931b92a-c7f3-4a71-ab27-37e2b13572c0/should-be-here.jpg?width=2541&height=1811&ext=.jpg\"]}";
int maxBodySize = 2048;
EdgeGridV1Signer signer = new EdgeGridV1Signer(null, maxBodySize);
ClientCredential credential = new ClientCredential(clientToken, accessToken, secret);
Stream uploadStream = null;
uploadStream = new MemoryStream(data.ToByteArray());
var uri = new Uri(apiurl);
var request = WebRequest.Create(uri);
foreach (string header in headers) request.Headers.Add(header);
request.Method = httpMethod;
Stream output = Console.OpenStandardOutput();
if (outputfile != null)
output = new FileInfo(outputfile).OpenWrite();
signer.Sign(request, credential, uploadStream);
using (var result = signer.Execute(request, credential, uploadStream))
using (output)
using (var reader = new StreamReader(result))
{
string value = reader.ReadToEnd();
}
//using (var result = signer.Execute(request, credential))
//{
// using (result)
// {
// using (var reader = new StreamReader(result))
// {
// string value = reader.ReadToEnd();
// }
// }
//}
}
public void PurgeCacheAkamai2()
{
// Create a request using a URL that can receive a post.
WebRequest request = WebRequest.Create("https://akab-7t5mh54r7lq3a7d7-js5q6mdtx42qimcn.purge.akamaiapis.net/ccu/v3/invalidate/url/production");
// Set the Method property of the request to POST.
request.Method = "POST";
// Create POST data and convert it to a byte array.
string postData = "This is a test that posts this string to a Web server.";
byte[] byteArray = Encoding.UTF8.GetBytes(postData);
// Set the ContentType property of the WebRequest.
request.ContentType = "application/x-www-form-urlencoded";
// Set the ContentLength property of the WebRequest.
request.ContentLength = byteArray.Length;
// Get the request stream.
Stream dataStream = request.GetRequestStream();
// Write the data to the request stream.
dataStream.Write(byteArray, 0, byteArray.Length);
// Close the Stream object.
dataStream.Close();
// Get the response.
WebResponse response = request.GetResponse();
// Display the status.
Console.WriteLine(((HttpWebResponse)response).StatusDescription);
// Get the stream containing content returned by the server.
dataStream = response.GetResponseStream();
// Open the stream using a StreamReader for easy access.
StreamReader reader = new StreamReader(dataStream);
// Read the content.
string responseFromServer = reader.ReadToEnd();
// Display the content.
Console.WriteLine(responseFromServer);
// Clean up the streams.
reader.Close();
dataStream.Close();
response.Close();
}
}

It depends on how you want to do it, for example whether you are using the CCU API or Akamai's Fast Purge feature.
In either case, the best thing to do is tag all your images with a dedicated CP code.
Once that is done, you can purge the CP code, which will in turn purge all the images (a rough sketch that reuses your EdgeGrid code is shown after the links below).
A few useful documents you can refer to for CCU:
https://developer.akamai.com/api/purge/ccu-v2/overview.html
For Fast Purge:
https://control.akamai.com/dl/customers/FIMA/Fast-Purge-QuickRef.pdf
https://github.com/akamai-open/api-kickstart/tree/master/examples
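A minimal sketch of a CP code purge, adapted from the PurgeCacheAkamai method in your code. The credentials, the purge host, and the CP code value 12345 are placeholders, and /ccu/v3/invalidate/cpcode/production is the CP code variant of the Fast Purge (CCU v3) endpoint; verify the exact path against the Fast Purge documentation linked above:
public void PurgeCpCode()
{
    // Placeholder credentials and purge host, exactly as in your existing code.
    string secret = "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX";
    string clientToken = "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX";
    string accessToken = "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX";
    string apiurl = "https://akab-xxxxxxxxxxxxxxxx-xxxxxxxxxxxxxxxx.purge.akamaiapis.net/ccu/v3/invalidate/cpcode/production";
    // The body lists CP codes instead of URLs; everything delivered under the CP code is invalidated.
    string data = "{\"objects\": [12345]}";
    var signer = new EdgeGridV1Signer();
    var credential = new ClientCredential(clientToken, accessToken, secret);
    Stream uploadStream = new MemoryStream(data.ToByteArray());
    var request = WebRequest.Create(new Uri(apiurl));
    request.Method = "POST";
    signer.Sign(request, credential, uploadStream);
    using (var result = signer.Execute(request, credential, uploadStream))
    using (var reader = new StreamReader(result))
    {
        // The response reports the purge status and an estimated completion time.
        string value = reader.ReadToEnd();
        Console.WriteLine(value);
    }
}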
Thanks,
Vinod

Related

Converting a piece of JavaScript code to C# to access an API

I need C# code that will trigger an NPrinting task. On our server we are not allowed to invoke an HTML file, hence I can't use the attached JavaScript.
The attached code works; I just need to translate it to .NET, as I can't use HTML on our server.
The JavaScript below works just fine:
<html>
<head>
</head>
<body>
<h1>NPrinting API task starter</h1>
<script src="http://code.jquery.com/jquery-1.10.1.min.js"></script>
<script type="text/javascript">
(function(){
console.log("started")
var taskIDs=[
"f3ebd873-b310-4a22-a269-24ce81b8ce74"
]
$.ajax({
url: 'URL:4993/api/v1/login/ntlm',
xhrFields: {
withCredentials: true
}
}).done(function(data) {
console.log(data);
for(var i=0;i<taskIDs.length;i++){
$.ajax({
type: "POST",
url: 'URL:4993/api/v1/tasks/'+taskIDs[i]+'/executions',
xhrFields: {
withCredentials: true
}
}).done(function(data) {
console.log("task "+i);
console.log(data);
if(i==taskIDs.length)
open(location, '_self').close();
});
}
});
})();
<!-- open(location, '_self').close(); -->
</script>
</body>
</html>
Here is the C# code, which I can't complete; everything below works but it doesn't start the task.
//Create the HTTP Request (authenticate) and add required headers
ServicePointManager.SecurityProtocol = SecurityProtocolType.Tls12;
HttpWebRequest request = (HttpWebRequest)WebRequest.Create(@"URL:4993/api/v1/login/ntlm");
CookieContainer cookies = new CookieContainer();
request.CookieContainer = cookies;
request.Method = "GET";
request.UserAgent = "Windows";
request.Accept = "application/json";
// specify to run as the current Microsoft Windows user
request.UseDefaultCredentials = true;
try
{
// make the web request and return the content
HttpWebResponse response = (HttpWebResponse)request.GetResponse();
StreamReader responseReader = new StreamReader(response.GetResponseStream());
string sResponseHTML = responseReader.ReadToEnd();
Console.WriteLine(sResponseHTML);
}
catch (Exception ex)
{
Console.WriteLine(ex.Message);
}
//Create second HTTP request (get list of apps) and add required headers
HttpWebRequest secondRequest = (HttpWebRequest)WebRequest.Create(@"URL:4993/api/v1/tasks/f3ebd873-b310-4a22-a269-24ce81b8ce74/executions");
//assign cookie to request to maintain session
secondRequest.CookieContainer = cookies;
secondRequest.Method = "POST";
secondRequest.UserAgent = "Windows";
secondRequest.Accept = "application/json";
// specify to run as the current Microsoft Windows user
secondRequest.UseDefaultCredentials = true;
Thanks
I found a solution to the above request: running an NPrinting API task from C#.
using Newtonsoft.Json;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Net;
using System.Text;
using System.Threading.Tasks;
namespace Post_Request_API
{
class Program
{
static void Main(string[] args)
{
//Create the HTTP Request (authenticate) and add required headers
ServicePointManager.SecurityProtocol = SecurityProtocolType.Tls12;
HttpWebRequest request = (HttpWebRequest)WebRequest.Create(@"URL:4993/api/v1/login/ntlm");
//Assign custom SSL certificate validation method if certificate is untrusted
//request.ServerCertificateValidationCallback += (sender, certificate, chain, sslPolicyErrors) => true;
CookieContainer cookies = new CookieContainer();
request.CookieContainer = cookies;
request.Method = "GET";
request.UserAgent = "Windows";
request.Accept = "application/json";
//Specify to run as the current Microsoft Windows user
request.UseDefaultCredentials = true;
try
{
// make the web request and return the content
HttpWebResponse response = (HttpWebResponse)request.GetResponse();
StreamReader responseReader = new StreamReader(response.GetResponseStream());
string sResponseHTML = responseReader.ReadToEnd();
Console.WriteLine(sResponseHTML);
}
catch (Exception ex)
{
Console.WriteLine(ex.Message);
}
//Create second HTTP request to add a new user and required headers
HttpWebRequest secondRequest = (HttpWebRequest)WebRequest.Create(@"URL:4993/api/v1/tasks/f3ebd873-b310-4a22-a269-24ce81b8ce74/executions");
//Assign custom SSL certificate validation method if certificate is untrusted
//secondRequest.ServerCertificateValidationCallback += (sender, certificate, chain, sslPolicyErrors) => true;
//Add the XSRF token
secondRequest.Headers.Add("X-XSRF-TOKEN", cookies.GetCookies(request.RequestUri)["NPWEBCONSOLE_XSRF-TOKEN"].Value);
secondRequest.CookieContainer = cookies;
secondRequest.Method = "POST";
secondRequest.UserAgent = "Windows";
secondRequest.Accept = "application/json";
secondRequest.ContentType = "application/json";
//Specify to run as the current Microsoft Windows user
secondRequest.UseDefaultCredentials = true;
//Prepare JSON object to send to the remote server
JsonUser user = new JsonUser();
user.ID = "";
user.type = "";
user.task = "";
user.created = "";
user.lastUpdate = "";
user.completed = "";
user.progress = "";
user.status = "Enqueued";
user.result = "";
user.priority = "";
string jUserString = JsonConvert.SerializeObject(user);
using (var streamWriter = new StreamWriter(secondRequest.GetRequestStream()))
{
streamWriter.Write(jUserString);
streamWriter.Flush();
streamWriter.Close();
}
try
{
HttpWebResponse response2 = (HttpWebResponse)secondRequest.GetResponse();
StreamReader responseReader2 = new StreamReader(response2.GetResponseStream());
string sResponseHTML2 = responseReader2.ReadToEnd();
}
catch (Exception ex)
{
Console.WriteLine(ex.Message);
}
}
public class JsonUser
{
public string ID { get; set; }
public string type { get; set; }
public string task { get; set; }
public string created { get; set; }
public string lastUpdate { get; set; }
public string completed { get; set; }
public string progress { get; set; }
public string status { get; set; }
public string result { get; set; }
public string priority { get; set; }
}
}
}

How to call Export PDF with the Reserved.ReportViewerWebControl.axd....URL from back-end C# code

My aim is to call the ExportBaseUrl link that the RDLC Export PDF button uses, from the C# side. I want to do this because there are 285 reports, each with different parameters, so doing it manually would take a lot of time.
I have worked on one solution, but it takes 15 minutes to render a 2-page RDLC to PDF.
It is taking that long because the response comes late or some deadlock is happening.
This is what I am doing.
The JS file:
var reportViewerName = ControlName; //Name attribute of report viewer control.
var src_url = $find(reportViewerName)._getInternalViewer().ExportUrlBase + 'PDF';
var contentDisposition = 'AlwaysInline'; //Content Disposition instructs the server to either return the PDF being requested as an attachment or a viewable report.
var src_new = src_url.replace(/(ContentDisposition=).*?(&)/, '$1' + contentDisposition + '$2');
window.open("/printPDF.asx?url=" + encodeURIComponent("http://localhost:5402"+src_new));
The PrintPDF.aspx file is like this:
using iText.Kernel.Pdf;
using iText.Kernel.Pdf.Action;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Net;
using System.Text;
using System.Threading.Tasks;
using System.Web;
namespace WebApp.WebAPI
{
/// <summary>
/// Summary description for printPDF
/// </summary>
public class printPDF : IHttpHandler
{
public void ProcessRequest(HttpContext context)
{
context.Response.ContentType = "text/plain";
context.Response.Write("Hello World");
string url = context.Request.QueryString["url"];
// Download data.
DownloadFile(url, context.Server.MapPath("~/pdf/pdffile.pdf"), context.Request).Wait();
PdfDocument pdfDoc = new PdfDocument(new PdfReader(context.Server.MapPath("~/pdf/pdffile.pdf")), new PdfWriter(context.Server.MapPath("~/pdf/pdffileAuto.pdf")));
// add content
PdfAction action = PdfAction.CreateJavaScript("this.print({bUI: true, bSilent: true, bShrinkToFit: true});");
pdfDoc.GetCatalog().SetOpenAction(action);
pdfDoc.Close();
context.Response.Clear();
context.Response.ContentType = "application/pdf";
context.Response.AddHeader("Content-Disposition",
"AlwaysInline;filename=\"FileName.pdf\"");
context.Response.BinaryWrite(File.ReadAllBytes(context.Server.MapPath("~/pdf/pdffileAuto.pdf")));
context.Response.Flush();
context.Response.End();
}
public async Task DownloadFile(string url, string destinationPath, HttpRequest req)
{
var request = (HttpWebRequest)WebRequest.Create(url);
request.Method = "GET";
var encoding = new UTF8Encoding();
request.Headers.Add(HttpRequestHeader.AcceptLanguage, "en-IN");
request.Headers.Add(HttpRequestHeader.AcceptEncoding, "gzip, deflate");
request.Accept = "text/html, application/xhtml+xml, image/jxr, */*";
request.UserAgent = "Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; rv:11.0) like Gecko";
request.KeepAlive = true;
request.Proxy = null;
request.CookieContainer = new CookieContainer();
Uri target = new Uri("http://localhost:5402/");
foreach (String item in req.Cookies)
{
request.CookieContainer.Add(new Cookie(item, req.Cookies[item].Value) { Domain = target.Host });
}
await request.GetResponseAsync().ContinueWith(t1 =>
{
using (var responseStream = t1.Result.GetResponseStream())
{
if (responseStream == null)
return;
int bufferSize = 1024;
byte[] buffer = new byte[bufferSize];
int bytesRead = 0;
using (FileStream fileStream = File.Create(destinationPath))
{
while ((bytesRead = responseStream.Read(buffer, 0, bufferSize)) != 0)
{
fileStream.Write(buffer, 0, bytesRead);
}
}
}
t1.Result.Close();
});
}
public bool IsReusable
{
get
{
return false;
}
}
}
}
The line
await request.GetResponseAsync().ContinueWith(t1 =>
takes around 15 minutes the first time, with or without async; the second time it deadlocks/freezes.
I also had to add the cookies because the URL was otherwise throwing a 500 internal server error.
If I call the URL directly in the browser, it runs in 1 second.
So if anyone knows what the issue is or can help, that would be a really big help.
Thanks in advance.
OK, I found what the issue is.
The browser tab requests PrintPDF.aspx, and that page in turn requests another URL on the same site.
Until the PrintPDF.aspx response to the tab is complete, that inner HttpWebRequest is not served.
Any idea why? I did set maxconnection in web.config, though.
I had to fix it by splitting the work into two files: an ASPX page that generates the PDF on a background thread, and an ASHX handler, called on page load, that checks whether the file has been generated and returns it once it has.
There was no other way, so I fixed it with two separate requests. A rough sketch of the handler half is shown below.
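This is only a sketch of the second file, using a hypothetical CheckPdf.ashx and a hypothetical "file" query-string parameter carrying the name the ASPX page chose; it checks whether the PDF exists yet and streams it back once it does (requires System.IO and System.Web):
public class CheckPdf : IHttpHandler
{
    public void ProcessRequest(HttpContext context)
    {
        // Path.GetFileName strips any directory part; a real handler should validate the name further.
        string file = context.Server.MapPath("~/pdf/" + Path.GetFileName(context.Request.QueryString["file"]));
        if (File.Exists(file))
        {
            context.Response.ContentType = "application/pdf";
            context.Response.AddHeader("Content-Disposition", "AlwaysInline;filename=\"FileName.pdf\"");
            context.Response.BinaryWrite(File.ReadAllBytes(file));
        }
        else
        {
            // Not generated yet; the page's script polls again after a short delay.
            context.Response.StatusCode = 202;
        }
    }
    public bool IsReusable
    {
        get { return false; }
    }
}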
Thanks to everyone who helped me.

Azure DataLake File Download From Javascript

I was trying to download a file from Azure Data Lake Store. It works on the C# side using the REST API, but it is not working from JavaScript.
My sample C# code is:
//Get Access Token
public DataLakeAccessToken ServiceAuth(string tenatId, string clientid, string clientsecret)
{
var authtokenurl = string.Format("https://login.microsoftonline.com/{0}/oauth2/token", tenatId);
using (var client = new HttpClient())
{
var model = new List<KeyValuePair<string, string>>()
{
new KeyValuePair<string, string>("grant_type","client_credentials"),
new KeyValuePair<string, string>("resource","https://management.core.windows.net/"),//Bearer
new KeyValuePair<string, string>("client_id",clientid),
new KeyValuePair<string, string>("client_secret",clientsecret),
};
var content = new FormUrlEncodedContent(model);
HttpResponseMessage response = client.PostAsync(authtokenurl, content).Result;
if (response.StatusCode == System.Net.HttpStatusCode.OK)
{
var accessToken = JsonConvert.DeserializeObject<DataLakeAccessToken>(response.Content.ReadAsStringAsync().Result);
return accessToken;
}
else
{
return null;
}
}
}
The file download code is:
public void DownloadFile(string srcFilePath, ref string destFilePath)
{
int i = 0;
var folderpath = Path.GetDirectoryName(destFilePath);
var filename = Path.GetFileNameWithoutExtension(destFilePath);
var extenstion = Path.GetExtension(destFilePath);
Increment:
var isfileExist = File.Exists(destFilePath);
if (isfileExist)
{
i++;
destFilePath = folderpath+filename + "_" + i + "_" + extenstion;
goto Increment;
}
string DownloadUrl = "https://{0}.azuredatalakestore.net/webhdfs/v1/{1}?op=OPEN&read=true";
var fullurl = string.Format(DownloadUrl, _datalakeAccountName, srcFilePath);
using (var client = new HttpClient())
{
client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Bearer", _accesstoken.access_token);
using (var formData = new MultipartFormDataContent())
{
var response = client.GetAsync(fullurl).Result;
using (var fs = new FileStream(destFilePath, FileMode.Create, FileAccess.Write, FileShare.None))
{
response.Content.CopyToAsync(fs).Wait();
}
}
}
}
First I generate the token using the client credentials, then download the file using that token and a path such as https://mydatalakestore.azuredatalakestore.net/myfolder/myfile: I pass myfolder/myfile as the source path, and the file is saved under the destFilePath file name.
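For reference, a minimal sketch of how the two methods above fit together, written as a method on the same (hypothetical) class so it can use the _accesstoken and _datalakeAccountName fields that DownloadFile already relies on; the tenant/client parameters and the destination path are placeholders:
public void DownloadExample(string tenantId, string clientId, string clientSecret)
{
    // Acquire the bearer token first; _datalakeAccountName is assumed to already
    // hold the store name (for example "mydatalakestore").
    _accesstoken = ServiceAuth(tenantId, clientId, clientSecret);
    string destFilePath = @"C:\temp\myfile.txt";          // local destination (placeholder)
    DownloadFile("myfolder/myfile", ref destFilePath);    // srcFilePath is relative to the store root
}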
In JavaScript I get the access token from my API server and send the request to mydatalakestore, but it throws a cross-origin (CORS) error for localhost:8085.
Does anyone know how to download a Data Lake Store file from client-side JavaScript using an access token without the cross-origin error?
Thanks in advance.

Batched Media Upload to Azure Blob Storage through WebApi

My web app currently allows users to upload media one-at-a-time using the following:
var fd = new FormData(document.forms[0]);
fd.append("media", blob); // blob is the image/video
$.ajax({
type: "POST",
url: '/api/media',
data: fd
})
The media then gets posted to a WebApi controller:
[HttpPost, Route("api/media")]
public async Task<IHttpActionResult> UploadFile()
{
if (!Request.Content.IsMimeMultipartContent("form-data"))
{
throw new HttpResponseException(HttpStatusCode.UnsupportedMediaType);
}
string mediaPath = await _mediaService.UploadFile(User.Identity.Name, Request.Content);
return Ok(mediaPath);
}
Which then does something along the lines of:
public async Task<string> UploadFile(string username, HttpContent content)
{
var storageAccount = new CloudStorageAccount(new StorageCredentials(accountName, accountKey), true);
CloudBlobClient blobClient = storageAccount.CreateCloudBlobClient();
CloudBlobContainer imagesContainer = blobClient.GetContainerReference("container-" + user.UserId);
var provider = new AzureStorageMultipartFormDataStreamProvider(imagesContainer);
await content.ReadAsMultipartAsync(provider);
var filename = provider.FileData.FirstOrDefault()?.LocalFileName;
// etc
}
This is working great for individual uploads, but how do I go about modifying this to support batched uploads of multiple files through a single streaming operation that returns an array of uploaded filenames? Documentation/examples on this seem sparse.
public class AzureStorageMultipartFormDataStreamProvider : MultipartFormDataStreamProvider
{
private readonly CloudBlobContainer _blobContainer;
private readonly string[] _supportedMimeTypes = { "images/png", "images/jpeg", "images/jpg", "image/png", "image/jpeg", "image/jpg", "video/webm" };
public AzureStorageMultipartFormDataStreamProvider(CloudBlobContainer blobContainer) : base("azure")
{
_blobContainer = blobContainer;
}
public override Stream GetStream(HttpContent parent, HttpContentHeaders headers)
{
if (parent == null) throw new ArgumentNullException(nameof(parent));
if (headers == null) throw new ArgumentNullException(nameof(headers));
if (!_supportedMimeTypes.Contains(headers.ContentType.ToString().ToLower()))
{
throw new NotSupportedException("Only jpeg and png are supported");
}
// Generate a new filename for every new blob
var fileName = Guid.NewGuid().ToString();
CloudBlockBlob blob = _blobContainer.GetBlockBlobReference(fileName);
if (headers.ContentType != null)
{
// Set appropriate content type for your uploaded file
blob.Properties.ContentType = headers.ContentType.MediaType;
}
this.FileData.Add(new MultipartFileData(headers, blob.Name));
return blob.OpenWrite();
}
}
Assuming your AzureStorageMultipartFormDataStreamProvider is similar to the class mentioned on this blog, it is actually already processing multiple files if there are multiple files in the request.
So all you need to do is change your UploadFile to return an IEnumerable<string> and change your controller's mediaPath accordingly.
So your MediaService would have:
var filenames = provider.FileData.Select(x => x.LocalFileName).ToList();
return filenames;
And your controller would have:
var mediaPaths = await _mediaService.UploadFile(User.Identity.Name, Request.Content);
return Ok(mediaPaths);
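Putting the two changes together, a sketch of the modified UploadFile based on the version in your question (it keeps the same accountName, accountKey and user variables that version already uses):
public async Task<IEnumerable<string>> UploadFile(string username, HttpContent content)
{
    var storageAccount = new CloudStorageAccount(new StorageCredentials(accountName, accountKey), true);
    CloudBlobClient blobClient = storageAccount.CreateCloudBlobClient();
    CloudBlobContainer imagesContainer = blobClient.GetContainerReference("container-" + user.UserId);
    var provider = new AzureStorageMultipartFormDataStreamProvider(imagesContainer);
    await content.ReadAsMultipartAsync(provider);
    // Return every uploaded blob name instead of just the first one.
    var filenames = provider.FileData.Select(x => x.LocalFileName).ToList();
    return filenames;
}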
Since you didn't post the code for your AzureStorageMultipartFormDataStreamProvider class, I created a custom AzureStorageMultipartFormDataStreamProvider that inherits from MultipartFileStreamProvider so that the Web API can handle batched uploads of multiple files.
In the AzureStorageMultipartFormDataStreamProvider we can override the ExecutePostProcessingAsync method.
In this method we can get the uploaded file data and then upload it to Azure Storage.
For more details, refer to the code below. The complete controller:
public class UploadingController : ApiController
{
public Task<List<FileItem>> PostFile()
{
if (!Request.Content.IsMimeMultipartContent("form-data"))
{
throw new HttpResponseException(HttpStatusCode.UnsupportedMediaType);
}
var multipartStreamProvider = new AzureStorageMultipartFormDataStreamProvider(GetWebApiContainer());
return Request.Content.ReadAsMultipartAsync<AzureStorageMultipartFormDataStreamProvider>(multipartStreamProvider).ContinueWith<List<FileItem>>(t =>
{
if (t.IsFaulted)
{
throw t.Exception;
}
AzureStorageMultipartFormDataStreamProvider provider = t.Result;
return provider.Files;
});
}
public static CloudBlobContainer GetWebApiContainer(string containerName = "webapi-file-container")
{
// Retrieve storage account from connection-string
CloudStorageAccount storageAccount = CloudStorageAccount.Parse(
"your connection string");
// Create the blob client
CloudBlobClient blobClient = storageAccount.CreateCloudBlobClient();
CloudBlobContainer container = blobClient.GetContainerReference(containerName);
// Create the container if it doesn't already exist
container.CreateIfNotExists();
// Enable public access to blob
var permissions = container.GetPermissions();
if (permissions.PublicAccess == BlobContainerPublicAccessType.Off)
{
permissions.PublicAccess = BlobContainerPublicAccessType.Blob;
container.SetPermissions(permissions);
}
return container;
}
}
public class FileItem
{
/// <summary>
/// file name
/// </summary>
public string Name { get; set; }
/// <summary>
/// size in bytes
/// </summary>
public string SizeInMB { get; set; }
public string ContentType { get; set; }
public string Path { get; set; }
public string BlobUploadCostInSeconds { get; set; }
}
public class AzureStorageMultipartFormDataStreamProvider : MultipartFileStreamProvider
{
private CloudBlobContainer _container;
public AzureStorageMultipartFormDataStreamProvider(CloudBlobContainer container)
: base(Path.GetTempPath())
{
_container = container;
Files = new List<FileItem>();
}
public List<FileItem> Files { get; set; }
public override Task ExecutePostProcessingAsync()
{
// Upload the files to azure blob storage and remove them from local disk
foreach (var fileData in this.FileData)
{
var sp = new Stopwatch();
sp.Start();
string fileName = Path.GetFileName(fileData.Headers.ContentDisposition.FileName.Trim('"'));
CloudBlockBlob blob = _container.GetBlockBlobReference(fileName);
blob.Properties.ContentType = fileData.Headers.ContentType.MediaType;
//set the number of blocks that may be simultaneously uploaded
var requestOption = new BlobRequestOptions()
{
ParallelOperationThreadCount = 5,
SingleBlobUploadThresholdInBytes = 10 * 1024 * 1024 ////maximum for 64MB,32MB by default
};
//upload a file to blob
blob.UploadFromFile(fileData.LocalFileName, options: requestOption);
blob.FetchAttributes();
File.Delete(fileData.LocalFileName);
sp.Stop();
Files.Add(new FileItem
{
ContentType = blob.Properties.ContentType,
Name = blob.Name,
SizeInMB = string.Format("{0:f2}MB", blob.Properties.Length / (1024.0 * 1024.0)),
Path = blob.Uri.AbsoluteUri,
BlobUploadCostInSeconds = string.Format("{0:f2}s", sp.ElapsedMilliseconds / 1000.0)
});
}
return base.ExecutePostProcessingAsync();
}
}
The result looks like this (screenshot not included).
I would look at uploading the media directly to blob storage after getting SAS tokens for all your files from the Web API in one request, then uploading the files from the client with promises, which parallelizes the uploads.
That would be the right design and approach: it will also increase your upload speed and reduce latency. A sketch of the SAS-issuing endpoint is shown below.
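A minimal sketch of what the SAS-issuing Web API endpoint could look like, using the same WindowsAzure.Storage types as the other answers; the route, the container name, and the 15-minute token lifetime are assumptions (requires Microsoft.WindowsAzure.Storage, System.IO and System.Linq):
[HttpPost, Route("api/media/sas")]
public IHttpActionResult GetUploadSasUris([FromBody] List<string> fileNames)
{
    var storageAccount = new CloudStorageAccount(new StorageCredentials(accountName, accountKey), true);
    var container = storageAccount.CreateCloudBlobClient()
        .GetContainerReference("container-" + User.Identity.Name);   // container name is an assumption
    container.CreateIfNotExists();
    var policy = new SharedAccessBlobPolicy
    {
        Permissions = SharedAccessBlobPermissions.Create | SharedAccessBlobPermissions.Write,
        SharedAccessExpiryTime = DateTimeOffset.UtcNow.AddMinutes(15) // short-lived, write-only token
    };
    // One writable SAS URI per requested file; the client then uploads each blob directly to these URIs,
    // so the file bytes never pass through the Web API.
    var sasUris = fileNames
        .Select(name => container.GetBlockBlobReference(Guid.NewGuid() + Path.GetExtension(name)))
        .Select(blob => blob.Uri + blob.GetSharedAccessSignature(policy))
        .ToList();
    return Ok(sasUris);
}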

Error on downloading a file using ASP.NET Web API

I'm using the code below for downloading with the Web API in ASP.NET.
When I click the download button, it calls the API, but after the DownloadFile function executes, the download dialog box doesn't appear.
[HttpGet]
public HttpResponseMessage DownloadFile(string DownloadFilePath)
{
HttpResponseMessage result = null;
var localFilePath = HttpContext.Current.Server.MapPath(DownloadFilePath);
// check if parameter is valid
if (String.IsNullOrEmpty(DownloadFilePath))
{
result = Request.CreateResponse(HttpStatusCode.BadRequest);
}
// check if file exists on the server
else if (!File.Exists(localFilePath))
{
result = Request.CreateResponse(HttpStatusCode.Gone);
}
else
{// serve the file to the client
result = Request.CreateResponse(HttpStatusCode.OK);
result.Content = new StreamContent(new FileStream(localFilePath, FileMode.Open, FileAccess.Read));
result.Content.Headers.ContentDisposition = new System.Net.Http.Headers.ContentDispositionHeaderValue("attachment");
result.Content.Headers.ContentDisposition.FileName = DownloadFilePath;
}
return result;
}
I don't get any exception from the code above, but the dialog box for downloading the file doesn't appear.
Here is the code I am using, and it works great. I hope it will give you an idea:
....
var fileBytes = Helper.GetFileBytes(filePath);//convert file to bytes
var stream = new MemoryStream(fileBytes);
resp.Content = new StreamContent(stream);
resp.Content.Headers.ContentType = new MediaTypeHeaderValue("application/vnd.openxmlformats-officedocument.spreadsheetml.sheet");
resp.Content.Headers.ContentDisposition = new ContentDispositionHeaderValue("attachment") { FileName = filerequest.FileName };
resp.Content.Headers.Add("Content-Encoding", "UTF-8");
return resp;
And here is the code for the GetFileBytes method:
public static byte[] GetFileBytes(string filePath)
{
var fileInfo = new FileInfo(filePath);
if (fileInfo.Exists)
{
return File.ReadAllBytes(fileInfo.FullName);
}
return null;
}
