How to play video from a blob url? - javascript

I'm new into front end and I am trying to make a Zoom clone using Blazor. Right now I can open the camera and get the stream and send with signalR, but I can't find a way to play the video in the clients. I don't know much of JS, so I get the code from this questions in this very site:
Get a stream of bytes from navigator.mediaDevices.getUserMedia()?
Convert blob to base64
How to receive continuous chunk of video as a blob array and set to video tag dynamically in Websocket
The JS code
let stream = null;
let recorder = null;
let videoData = null;
let videoTeste = null;
let chunks = [];
let wholeVideo = [];
let mediaRecorder;
async function onStart(options) {
let video = document.getElementById(options.videoID);
if (navigator.mediaDevices.getUserMedia) {
try {
stream = await navigator.mediaDevices.getUserMedia({ video: true });
video.srcObject = stream;
video.play();
recorder = new MediaRecorder(stream);
recorder.ondataavailable = event => {
videoData = event.data;
chunks.push(videoData);
sendData();
};
recorder.start(100);
}
catch (err) {
console.log("An error occurred: " + err);
}
}
}
async function sendData() {
const superBuffer = new Blob(chunks, {
type: 'video/mp4'
});
//let base64data = window.URL.createObjectURL(superBuffer);
let base64data = await blobToBase64(superBuffer);
if (videoTeste) {
chunks = [];
videoTeste.invokeMethodAsync("SendVideoData", base64data);
window.URL.revokeObjectURL(base64data);
}
}
async function blobToBase64(blob) {
return new Promise((resolve, _) => {
const reader = new FileReader();
reader.onloadend = () => resolve(reader.result);
reader.readAsDataURL(blob);
return reader;
});
}
async function playVideo(source) {
try {
let video = document.getElementById("videoplayer");
video.srcObject = null;
let currentTime = video.currentTime;
let file = await fetch(source).then(r => r.blob());
video.src = file;
video.currentTime = currentTime;
video.play();
}
catch (err) {
console.log("An error occurred: " + err);
}
}
window.OnClassWebCam = {
start: async (options) => {
await onStart(options);
},
videoPlayer: async (source) => {
await playVideo(source);
},
dotNetHelper: async (dotNetHelper) => {
videoTeste = dotNetHelper;
}
};
The C# Front Code:
using Microsoft.AspNetCore.Components;
using Microsoft.AspNetCore.SignalR.Client;
using Microsoft.JSInterop;
using System.Text.Json;
namespace OnClassBlazor.Pages
{
public class VideoTesteBase : ComponentBase
{
[Inject]
protected IJSRuntime JSRuntime { get; set; }
private HubConnection? hubConnection;
protected string DataAtual = DateTime.Now.ToString();
protected string SourceVideo = string.Empty;
public async Task Start()
{
await JSRuntime.InvokeVoidAsync("OnClassWebCam.start", options);
}
protected override async Task OnInitializedAsync()
{
var dotNetReference = DotNetObjectReference.Create(this);
await JSRuntime.InvokeVoidAsync("OnClassWebCam.dotNetHelper", dotNetReference);
hubConnection = new HubConnectionBuilder()
.WithUrl(#"http://localhost:5000/videohub")
.ConfigureLogging(o => {
o.SetMinimumLevel(LogLevel.Trace);
})
.Build();
hubConnection.On<string>("ReceiveStream", (source) =>
{
JSRuntime.InvokeVoidAsync("OnClassWebCam.videoPlayer", source);
});
await hubConnection.StartAsync();
}
[JSInvokable]
public async Task SendVideoData(string stream)
{
Console.WriteLine($"stream size {stream.Length}");
if (IsConnected)
{
await hubConnection.SendAsync("UploadStreamBytes", stream);
}
}
public bool IsConnected =>
hubConnection?.State == HubConnectionState.Connected;
public async ValueTask DisposeAsync()
{
if (hubConnection is not null)
{
await hubConnection.DisposeAsync();
}
}
protected WebCamOptions options = new WebCamOptions()
{
CanvasID = "canvas",
VideoID = "video"
};
protected override void OnInitialized()
{
}
}
public class WebCamOptions
{
public int Width { get; set; } = 960;
public int Height { get; set; } = 540;
public string VideoID { get; set; }
public string CanvasID { get; set; }
public string Filter { get; set; } = null;
}
}
The C# Hub code:
using Microsoft.AspNetCore.SignalR;
using System.Text.Json;
using System.Threading.Channels;
namespace OnClass.API.Hubs
{
public class VideoHub : Hub
{
public async Task SendStream(object stream)
{
await Clients.All.SendAsync("ReceiveMessage", stream);
}
public async Task UploadStreamBytes(string stream)
{
Console.WriteLine($"UploadStreamBytes size: {stream.Length}");
await Clients.All.SendAsync("ReceiveStream", stream);
}
}
}
The component code:
#page "/videochat"
#inherits VideoTesteBase
<h3>VideoTeste</h3>
<div id="container">
<video id="#options.VideoID"
autoplay="true" muted="muted"
width="#options.Width"
height="#options.Height">
</video>
<button id="start" #onclick="Start" disabled="#(!IsConnected)">Start Video</button>
</div>
<div id="videodastream">
<video id="videoplayer"
autoplay="true" muted="muted"
width="100"
height="100">
</video>
<button id="aqui" >Video</button>
</div>

Related

Data binding from Javascript returns empty array in C#

In console.log I can see the array is not empty,as well it's shown on the image below. However, when I send the data to the endpoint the array is 0. I notice the other element MaterialId has value, so must be some problem with the array only. The data is sent through axios.
Any help is appreciated.
C# Model data:
public class axiosChangeMaterialPictureModel
{
public Array[] Image { get; set; }
public int MaterialId { get; set; }
}
C# Endpoint:
[HttpPost]
public IActionResult ChangeMaterialPicture([FromBody] axiosChangeMaterialPictureModel data)
{
string defaultPath = _webHostEnvironment.WebRootPath;
string oldPicture = _warehouseService.ChangeMaterialPicture(data.Image, data.MaterialId, defaultPath);
if (!string.IsNullOrEmpty(oldPicture))
{
// Delete the old image
_convertService.DeleteMaterialFile(oldPicture);
return Ok();
}
else
{
return BadRequest();
}
}
Javascript:
let arrBinaryFile = [];
let file = document.getElementById(`file-${materialId}`).files[0];
let reader = new FileReader();
// Array
reader.readAsArrayBuffer(file);
reader.onloadend = function (evt) {
if (evt.target.readyState == FileReader.DONE) {
var arrayBuffer = evt.target.result,
array = new Uint8Array(arrayBuffer);
for (var i = 0; i < array.length; i++) {
arrBinaryFile.push(array[i]);
}
}
}
console.log(arrBinaryFile);
let baseUrl = `${baseSharedUrl}/Warehouse/ChangeMaterialPicture`;
var data = {
Image : arrBinaryFile,
MaterialId: materialId
}
axios.post(baseUrl, data)
.then(function (response) {
})
.catch(function (error) {
})
Javascript Array Image:
ImageFromTheArray
UPDATE:
After some research, to send array data I had to add the header with octet-stream. I'm getting 415 Unsupported Media Type, however, in the request I can see the data-with the array. Now the problem is how can I solve this 415?
let config = {
headers: {
"Content-Type": "application/octet-stream",
}
}
public Array[] Image { get; set; } looks suspicious.
Have you tried with byte[]? public byte[] Image { get; set; }
You should post data inside onloadend callback:
reader.onloadend = function (evt) {
if (evt.target.readyState == FileReader.DONE) {
var arrayBuffer = evt.target.result,
array = new Uint8Array(arrayBuffer);
for (var i = 0; i < array.length; i++) {
arrBinaryFile.push(array[i]);
}
//post data when arrBinaryFile is ready
console.log(arrBinaryFile);
let baseUrl = `${baseSharedUrl}/Warehouse/ChangeMaterialPicture`;
var data = {
Image : arrBinaryFile,
MaterialId: materialId
}
axios.post(baseUrl, data)
.then(function (response) {
})
.catch(function (error) {
})
}
}

Can't switch to back camera in Android WebView using PeerJs

I've been trying to make a simple WebRTC app using Firebase Database and PeerJs that can switch cameras. I found one tutorial and it works properly, but I want to switch the camera between front and back which is not included in the tutorial.
AndroidManifest.xml
<?xml version="1.0" encoding="utf-8"?>
<manifest
...
<uses-feature android:name="android.hardware.camera.any"/>
<uses-feature android:name="android.hardware.camera"/>
<uses-permission android:name="android.permission.INTERNET"/>
<uses-permission android:name="android.permission.CAMERA"/>
<uses-permission android:name="android.permission.RECORD_AUDIO"/>
<uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS"/>
...
</manifest>
CallActivity.java
import static android.view.View.GONE;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.webkit.PermissionRequest;
import android.webkit.WebChromeClient;
import android.webkit.WebView;
import android.webkit.WebViewClient;
import android.widget.Button;
import android.widget.EditText;
import android.widget.RelativeLayout;
import android.widget.TextView;
import android.widget.Toast;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.appcompat.app.AppCompatActivity;
import com.google.firebase.database.DataSnapshot;
import com.google.firebase.database.DatabaseError;
import com.google.firebase.database.DatabaseReference;
import com.google.firebase.database.FirebaseDatabase;
import com.google.firebase.database.ValueEventListener;
import java.util.UUID;
public class CallActivity extends AppCompatActivity {
private static final String TAG = CallActivity.class.getSimpleName();
private final String CAMERA_FRONT = "user";
private final String CAMERA_BACK = "environment"; // Tried to use it on navigator.mediaDevices.getUserMedia({video: {facingMode: camera}}) but it didn't work.
private RelativeLayout layoutIncoming, layoutCall, layoutCallControl;
private Button buttonReject, buttonAccept, buttonCall, buttonAudio, buttonVideo, buttonCamera;
private EditText editTextCallName;
private TextView textViewIncoming;
private WebView webView;
private String name;
private String callerName;
private boolean isPeerConnected = false;
private DatabaseReference usersRef = FirebaseDatabase.getInstance("link_to_firebase_database").getReference("users");
private boolean videoEnabled = true;
private boolean audioEnabled = true;
private String camera = CAMERA_FRONT;
private String uniqueID;
//== Overridden ==//
#Override
protected void onCreate(#Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_call);
layoutIncoming = findViewById(R.id.activity_call_layoutIncoming);
layoutCall = findViewById(R.id.activity_call_layoutCall);
layoutCallControl = findViewById(R.id.activity_call_layoutCallControl);
buttonAccept = findViewById(R.id.activity_call_buttonAccept);
buttonReject = findViewById(R.id.activity_call_buttonReject);
buttonCall = findViewById(R.id.activity_call_buttonCall);
buttonVideo = findViewById(R.id.activity_call_buttonVideo);
buttonAudio = findViewById(R.id.activity_call_buttonAudio);
buttonCamera = findViewById(R.id.activity_call_buttonCamera);
editTextCallName = findViewById(R.id.activity_call_editTextCallName);
textViewIncoming = findViewById(R.id.activity_call_textViewIncoming);
webView = findViewById(R.id.activity_call_webView);
if (getIntent().hasExtra("name")) {
name = getIntent().getStringExtra("name");
}
buttonCall.setOnClickListener(view -> {
callerName = editTextCallName.getText().toString().trim();
if (!callerName.isEmpty()) sendCallRequest();
});
buttonVideo.setOnClickListener(view -> {
videoEnabled = !videoEnabled;
callJsFunction("javascript:toggleVideo(\"" + videoEnabled + "\")");
if (videoEnabled)
buttonVideo.setText("Video Off");
else
buttonVideo.setText("Video On");
});
buttonAudio.setOnClickListener(view -> {
audioEnabled = !audioEnabled;
callJsFunction("javascript:toggleAudio(\"" + audioEnabled + "\")");
if (audioEnabled)
buttonAudio.setText("Mute");
else
buttonAudio.setText("Unmute");
});
buttonCamera.setOnClickListener(view -> {
if (camera.equals(CAMERA_FRONT)) camera = CAMERA_BACK;
else camera = CAMERA_FRONT;
switchCamera();
});
setupWebView();
}
//== Public ==//
public void onPeerConnected() {
isPeerConnected = true;
}
//== Private ==//
private void setupWebView() {
WebChromeClient client = new WebChromeClient() {
#Override
public void onPermissionRequest(PermissionRequest request) {
runOnUiThread(() -> request.grant(request.getResources()));
}
};
webView.setWebChromeClient(client);
webView.getSettings().setJavaScriptEnabled(true);
webView.getSettings().setMediaPlaybackRequiresUserGesture(false);
webView.addJavascriptInterface(new JsInterface(this), "Android");
loadVideoCall();
}
private void loadVideoCall() {
String filePath = "file:///android_asset/call.html";
webView.loadUrl(filePath);
WebViewClient client = new WebViewClient() {
#Override
public void onPageFinished(WebView view, String url) {
initializePeer();
}
};
webView.setWebViewClient(client);
}
private void initializePeer() {
uniqueID = getUniqueID();
callJsFunction("javascript:init(\"" + uniqueID + "\")");
usersRef.child(name).child("incoming").addValueEventListener(new ValueEventListener() {
#Override
public void onDataChange(#NonNull DataSnapshot snapshot) {
Log.d(TAG, "Received incoming call!!!");
onCallRequest(snapshot.getValue(String.class));
}
#Override
public void onCancelled(#NonNull DatabaseError error) {
}
});
}
private void sendCallRequest() {
if (!isPeerConnected) {
Toast.makeText(this, "You're not connected to internet. Please try again.", Toast.LENGTH_SHORT).show();
return;
}
usersRef.child(callerName).child("incoming").setValue(name);
usersRef.child(callerName).child("isAvailable").addValueEventListener(new ValueEventListener() {
#Override
public void onDataChange(#NonNull DataSnapshot snapshot) {
boolean isAvailable = snapshot.getValue() != null? snapshot.getValue(boolean.class): false;
if (isAvailable) {
listenForConnectionID();
}
}
#Override
public void onCancelled(#NonNull DatabaseError error) {
}
});
}
private void onCallRequest(String caller) {
if (caller == null) return;
String incomingMessage = caller + " is calling...";
textViewIncoming.setText(incomingMessage);
buttonAccept.setOnClickListener(view -> {
usersRef.child(name).child("connectionID").setValue(uniqueID);
usersRef.child(name).child("isAvailable").setValue(true);
layoutIncoming.setVisibility(GONE);
switchToCallControls();
});
buttonReject.setOnClickListener(view -> {
usersRef.child(name).child("incoming").setValue(null);
layoutIncoming.setVisibility(GONE);
});
layoutIncoming.setVisibility(View.VISIBLE);
}
private void listenForConnectionID() {
usersRef.child(callerName).child("connectionID").addValueEventListener(new ValueEventListener() {
#Override
public void onDataChange(#NonNull DataSnapshot snapshot) {
if (snapshot.getValue() == null) return;
switchToCallControls();
callJsFunction("javascript:startCall(\"" + snapshot.getValue(String.class) + "\")");
}
#Override
public void onCancelled(#NonNull DatabaseError error) {
}
});
}
private void switchToCallControls() {
layoutCall.setVisibility(GONE);
layoutCallControl.setVisibility(View.VISIBLE);
}
private void switchCamera() {
Log.d(TAG, "switchCamera: " + camera);
callJsFunction("javascript:switchCamera(\"" + camera + "\")");
}
private void callJsFunction(String functionString) {
webView.post(() -> webView.evaluateJavascript(functionString, value -> Log.d(TAG, value)));
}
private String getUniqueID() {
return UUID.randomUUID().toString();
}
}
call.html
<!DOCTYPE html>
<html>
<head>
<link href="./style.css" rel="stylesheet"/>
</head>
<body>
<script src="./peerjs.js"></script>
<video class="secondaryVideo" autoplay id="remoteVideo"></video>
<video class="primaryVideo" autoplay muted id="localVideo"></video>
<script src="./call.js"></script>
</body>
</html>
call.js
let localVideo = document.getElementById("localVideo")
let remoteVideo = document.getElementById("remoteVideo")
localVideo.style.opacity = 0
remoteVideo.style.opacity = 0
let peer
function init(userID) {
peer = new Peer(userID)
peer.on('open', () => {
Android.onPeerConnected();
})
listen()
}
let localStream
function listen() {
peer.on('call', (call) => {
navigator.mediaDevices.getUserMedia({
video: true,
audio: true
}).then(function(mediaStream) {
localStream = mediaStream
localVideo.srcObject = localStream
localVideo.style.opacity = 1
call.answer(localStream)
call.on('stream', (remoteStream) => {
remoteVideo.srcObject = remoteStream
remoteVideo.style.opacity = 1
// Swap classes of localVideo and remoteVideo
localVideo.className = "secondaryVideo"
remoteVideo.className = "primaryVideo"
})
})
})
}
function startCall(otherUserID) {
navigator.mediaDevices.getUserMedia({
video: true,
audio: true
}).then(function(mediaStream) {
localStream = mediaStream
localVideo.srcObject = localStream
localVideo.style.opacity = 1
const call = peer.call(otherUserID, localStream)
call.on('stream', (remoteStream) => {
remoteVideo.srcObject = remoteStream
remoteVideo.style.opacity = 1
// Swap classes of localVideo and remoteVideo
localVideo.className = "secondaryVideo"
remoteVideo.className = "primaryVideo"
})
})
}
function toggleVideo(b) {
if (b == "true") {
localStream.getVideoTracks()[0].enabled = true
} else {
localStream.getVideoTracks()[0].enabled = false
}
}
function toggleAudio(b) {
if (b == "true") {
localStream.getAudioTracks()[0].enabled = true
} else {
localStream.getAudioTracks()[0].enabled = false
}
}
let camIndex = 0
function switchCamera() {
navigator.mediaDevices.enumerateDevices().then(function(devices) {
var cameras = []
devices.forEach(function(device) {
'videoinput' === device.kind && cameras.push(device.deviceId)
})
console.log(cameras.length)
if (camIndex == cameras.length - 1) {
camIndex = 0
} else {
camIndex = camIndex + 1
}
var constraints = {
video: {deviceId: {exact: cameras[camIndex]}},
audio: true
}
navigator.mediaDevices.getUserMedia(constraints).then(function(mediaStream) {
localStream = mediaStream
localVideo.srcObject = localStream
console.log("camera switched to camIndex " + camIndex) // Only triggered when camIndex = 0
})
})
}
I assume that camIndex = 1 is a back camera but it gives this error message in the logcat
D/CallActivity: switchCamera: environment
E/chromium: [ERROR:web_contents_delegate.cc(218)] WebContentsDelegate::CheckMediaAccessPermission: Not supported.
E/chromium: [ERROR:web_contents_delegate.cc(218)] WebContentsDelegate::CheckMediaAccessPermission: Not supported.
D/CallActivity: null
I/chromium: [INFO:CONSOLE(97)] "2", source: file:///android_asset/call.js (97)
E/libc: Access denied finding property "persist.vendor.camera.privapp.list"
W/ThreadPoolSingl: type=1400 audit(0.0:35101): avc: denied { read } for name="u:object_r:vendor_camera_prop:s0" dev="tmpfs" ino=19669 scontext=u:r:untrusted_app:s0:c161,c256,c512,c768 tcontext=u:object_r:vendor_camera_prop:s0 tclass=file permissive=0
E/cr_VideoCapture: cameraDevice encountered an error
I/chromium: [INFO:CONSOLE(0)] "Uncaught (in promise) NotReadableError: Could not start video source", source: file:///android_asset/call.html (0)
D/CallActivity: switchCamera: user
E/chromium: [ERROR:web_contents_delegate.cc(218)] WebContentsDelegate::CheckMediaAccessPermission: Not supported.
E/chromium: [ERROR:web_contents_delegate.cc(218)] WebContentsDelegate::CheckMediaAccessPermission: Not supported.
D/CallActivity: null
I/chromium: [INFO:CONSOLE(97)] "2", source: file:///android_asset/call.js (97)
D/: PlayerBase::stop() from IPlayer
D/AudioTrack: stop(398): called with 62088 frames delivered
I/chromium: [INFO:CONSOLE(115)] "camera switched to camIndex 0", source: file:///android_asset/call.js (115)
W/.testapp_webrt: Attempt to remove non-JNI local reference, dumping thread
W/AudioManager: Use of stream types is deprecated for operations other than volume control
W/AudioManager: See the documentation of requestAudioFocus() for what to use instead with android.media.AudioAttributes to qualify your playback use case

Posted an array of string in ReactJS but got one string in WebAPI

I tried to post an array of string from ReactJS to WebApi but I got just one string [0].
Here is my ReactJS code:
import React, { useState } from "react";
import axios from "axios";
export const FileUpload = () => {
const [files, setFiles] = useState();
const [fileNames, setFileNames] = useState();
const saveFile = (e) => {
var tempfiles = files;
if (tempfiles == null) {
tempfiles = [];
}
tempfiles.push(e.target.files[0]);
setFiles(tempfiles);
var tempFileNames = fileNames;
if (tempFileNames == null) {
tempFileNames = [];
}
tempFileNames.push(e.target.files[0].name)
setFileNames(tempFileNames);
};
const uploadFile = async (e) => {
debugger
const formData = new FormData();
//upload many
for (let i = 0; i < files.length; i++) {
formData.append("Files[${i}]", files[i]);
// formData.append('FileNames[${i}]', fileNames[i]);
}
//upload 1
// formData.append("FormFile", file);
//add test data
formData.append('TestField', 'abcxyz');
formData.append('FileNames', fileNames);
formData.append('ProjectId', 123);
formData.append('NameToDisclose', false);
//
try {
//upload many
const res = await axios.post("https://localhost:44376/api/test/UploadMany", formData);
////upload 1
// const res = await axios.post("https://localhost:44376/api/test/Upload", formData);
console.log(res);
} catch (ex) {
console.log(ex);
}
};
return (
<>
<input type="file" onChange={saveFile} />
<input type="button" value="upload" onClick={uploadFile} />
</>
);
};
Here is my Controller:
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
using System;
using UploadFileToWebApiBE.Model;
namespace UploadFileToWebApiBE.Controllers
{
[Route("api/[controller]")]
[ApiController]
public class TestController : ControllerBase
{
[HttpPost]
[Route("UploadMany")]
public ActionResult Post([FromForm]UploadFileMany files)
{
try
{
files.Files = Request.Form.Files;
return StatusCode(StatusCodes.Status201Created);
}
catch (Exception ex)
{
return StatusCode(StatusCodes.Status500InternalServerError);
}
}
}
}
Here is my UploadFileMany:
using Microsoft.AspNetCore.Http;
namespace UploadFileToWebApiBE.Model
{
public class UploadFileMany
{
public string TestField { get; set; }
public IFormFileCollection Files { get; set; }
public string[] FileNames { get; set; }
public int ProjectId { get; set; }
public bool NameToDisclose { get; set; } = false;
}
}
This is the data from ReactJS:
This is the data from WebApi:
I want to have 3 items for FileNames, not one item seperated by a comma.
Any help will be much appreciated.
Try to pass filenames as a JSON string by using:
formData.append('FileNames', JSON.stringify(fileNames);
and then parse the JSON string in your back-end codes. because i remember that formData in JavaScript doesn't accept arrays and objects as input. You can stringify them and pass your arrays and objects in a standard format.
I hope this works for you

Download CSV file from assets folder in IONIC 3

I have one demo-file.csv file and it is in the assets/csv folder, so how can I download it from mobile,
here is my HTML & COMPONENT code.
HTML CODE
<button ion-button type="button" block (click)="downloadFile('assets/csv/demo-file.csv', 'demo-file.csv')">Download Demo File</button>
COMPONENT CODE
public downloadFile(link: any, fileName: any) {
if (link) {
let path = null;
this.showWaitingLoading();
if (this.platform.is('ios')) {
path = this.file.documentsDirectory;
} else {
path = this.file.dataDirectory;
}
const transfer = this.transfer.create();
transfer.download(link, path + fileName).then(entry => {
this.dismissWaitingLoading();
this.openFile(entry.toURL());
}).catch(() => {
this.dismissWaitingLoading();
this.showToastMsg('error', "Something went wrong");
});
}
}
/* ================= OPNE FILE FUNCTION ===========*/
public openFile(path: any) {
this.fileOpener.open(path, 'application/*')
.then(() => console.log('File is opened'))
.catch((e: any) => console.log('Error openening file', e));
}
I'm not able to download the file, is there any thing missing in my PATH?
Try to read it using Http get and write it as a Blob, Sample code as follows,
export class csvPage {
csvData: any[] = [];
headerRow: any[] = [];
constructor(public navCtrl: NavController,
public navParams: NavParams,
private http: Http) {
this.readCsvData();
}
private readCsvData() {
this.http.get('assets/dummyData.csv')
.subscribe(
data => this.extractData(data),
err => this.handleError(err)
);
}
private extractData(res) {
let csvData = res['_body'] || '';
let parsedData = papa.parse(csvData).data;
this.headerRow = parsedData[0];
parsedData.splice(0, 1);
this.csvData = parsedData;
}
downloadCSV() {
let csv = papa.unparse({
fields: this.headerRow,
data: this.csvData
});
// Dummy implementation for Desktop download purpose
var blob = new Blob([csv]);
var a = window.document.createElement("a");
a.href = window.URL.createObjectURL(blob);
a.download = "newdata.csv";
document.body.appendChild(a);
a.click();
document.body.removeChild(a);
}
private handleError(err) {
console.log('something went wrong: ', err);
}
}
Html Code
<button ion-button type="button" block (click)="downloadFile('demo-file.csv')">Download Demo File</button>
Component Code
declare var cordova: any; // declare Top Section of component
public downloadFile(link: any) {
if (link) {
let path = null;
this.showWaitingLoading();
if (this.platform.is('ios')) {
path = cordova.file.documentsDirectory;
} else {
path = cordova.file.dataDirectory;
}
const transfer = this.transfer.create();
const imageLocation = `${cordova.file.applicationDirectory}www/assets/csv/${link}`;
transfer.download(imageLocation, path + link).then(entry => {
this.dismissWaitingLoading();
this.openFile(entry.toURL());
}).catch(() => {
this.dismissWaitingLoading();
this.showToastMsg('error', "Something went wrong");
})
}
/* ================= OPNE FILE FUNCTION ===========*/
public openFile(path: any) {
this.fileOpener.open(path, 'application/*')
.then(() => console.log('File is opened'))
.catch((e: any) => console.log('Error openening file', e));
}
Please try this one
You can use a library... Also, the HttpClient can read data as Blob for you.
npm i file-saver
// my.component.ts
import * as fileSaver from 'file-saver';
export class MyComponent {
constructor(private http: HttpClient){}
downloadFile(path: string) {
this.startLoading();
this.http.get(`${MY_APP_URL}/${path}`, { responseType: 'blob' })
.pipe(tap(blob: Blob => fileSaver.saveAs(blob, 'your_csv_file_name.csv')))
.subscribe(() => this.stopLoading(), err => this.handleErr(err));
}
}
Hope this helps a little :-)

SignalR on new relic it's on top of error rate with this request: [host]/signalr/connect/

I have a Sitefinity app hosted in Azure CloudService and I use signalr to push data to clients. Recently monitoring the app with new relic I found that the signalr its on top of error rate with this request: [host]/signalr/connect/ as you may see in the image. Also say that push work perfectly.
capture of error rate in newrelic
My Server side:
[assembly: OwinStartup(typeof(Startup))]
namespace SitefinityWebApp.FXS.Custom.AppStart
{
public class Startup
{
public void Configuration(IAppBuilder app)
{
GlobalHost.HubPipeline.AddModule(new SignalRErrorHandler());
var hubConfiguration = new HubConfiguration
{
EnableDetailedErrors = true
};
app.MapSignalR(hubConfiguration);
}
}
}
namespace SitefinityWebApp.Hubs.Posts
{
public class PostHub : Hub
{
}
public class PostNotification : IPostNotification
{
private readonly static Lazy<PostNotification> instance = new Lazy<PostNotification>(() => new PostNotification());
private readonly IHubContext iHubContext;
private PostNotification()
: this(GlobalHost.ConnectionManager.GetHubContext<PostHub>())
{
}
internal PostNotification(IHubContext iHubContext)
{
this.iHubContext = Guard.ArgumentNotNullAndReturn(iHubContext, "iHubContext");
}
public static PostNotification Instance
{
get
{
return instance.Value;
}
}
public void PostCreated(string action, PostResponse post)
{
var proxy = this.iHubContext.Clients.All;
proxy.Invoke(action, post);
}
}
}
My client side:
(function () {
Class.PostNotifications = function () {
var parent = Class.Base(),
_this = Util.extendObject(parent);
_this.ReconnectionTime = 5000;
_this.PostCreated_ObserverSubject = null;
_this.PostHub = null;
_this.ContentType = "";
_this.constructor = function (contentType) {
_this.ContentType = contentType;
_this.startConnection();
_this.setVars();
_this.configurePostHub();
};
_this.setVars = function () {
_this.PostHub = $.connection.postHub;
_this.PostCreated_ObserverSubject = new Class.Patterns.Observer.Subject();
};
_this.configurePostHub = function () {
_this.PostHub.client[_this.ContentType + 'Created'] = function (post) {
_this.PostCreated_ObserverSubject.notify(post);
};
};
_this.whenPostCreated = function (functionDelegate) {
if (functionDelegate !== undefined && functionDelegate !== null) {
var json = {
'UpdateDelegate': functionDelegate
};
var observer = new Class.Patterns.Observer.Observer();
observer.init(json);
_this.PostCreated_ObserverSubject.addObserver(observer);
}
};
_this.startConnection = function () {
$.connection.hub.logging = true;
$.connection.hub.start().done(function () {
console.log("connection stablish");
}).fail(function (err) {
console.log(err);
});
$.connection.hub.disconnected(function () {
setTimeout(function () {
$.connection.hub.start();
}, _this.ReconnectionTime);
});
};
return _this;
};
}());
Any help would be appreciated

Categories

Resources