How to load fingerprinting js asynchronously and get visitor id - javascript

I'm using JavaScript scripting with a legacy version of iMacros (v8.9.7), and I'm a bit confused about getting FingerprintJS to work properly.
My use case:
1. Load FingerprintJS from the CDN asynchronously.
2. Store the visitor ID in a variable.
My code:
Components.utils.import("resource://gre/modules/Promise.jsm");
Components.utils.import("resource://gre/modules/devtools/Console.jsm");
function initFingerprintJS() {
FingerprintJS.load()
.then(fp => fp.get())
.then(result => {
const visitorId = result.visitorId;
window.console.log(visitorId);
});
}
const loadScript = (src, async = true, type = "text/javascript") => {
  return new Promise((resolve, reject) => {
    try {
      const el = window.document.createElement("script");
      const container = window.document.head || window.document.body;
      el.type = type;
      el.async = async;
      el.src = src;
      el.addEventListener("load", () => {
        resolve({ status: true });
      });
      el.addEventListener("error", () => {
        reject({
          status: false,
          message: `Failed to load the script ${src}`
        });
      });
      container.appendChild(el);
    } catch (err) {
      reject(err);
    }
  });
};
function get_fingerprint() {
  loadScript("https://cdn.jsdelivr.net/npm/@fingerprintjs/fingerprintjs@3/dist/fp.min.js")
    .then((data) => {
      window.console.log("Script loaded successfully", data);
      initFingerprintJS();
    })
    .catch((err) => {
      window.console.error(err);
    });
}

<button onclick="get_fingerprint()">FINGERPRINT</button>
When I remove the two Components imports and run the script in the browser console, everything works. But when it runs from the iMacros interface, it generates errors:
GET
https://cdn.jsdelivr.net/npm/@fingerprintjs/fingerprintjs@3/dist/fp.min.js
[HTTP/2.0 200 OK 0 ms] Script loaded successfully Object { status:
true } ReferenceError: FingerprintJS is not defined Stack trace:
initFingerprintJS@resource://gre/modules/RemoteAddonsParent.jsm:1102:5
get_fingerprint/<@resource://gre/modules/RemoteAddonsParent.jsm:1142:9
Handler.prototype.process@resource://gre/modules/Promise.jsm ->
resource://gre/modules/Promise-backend.js:932:23
this.PromiseWalker.walkerLoop@resource://gre/modules/Promise.jsm ->
resource://gre/modules/Promise-backend.js:813:7
this.PromiseWalker.scheduleWalkerLoop/<@resource://gre/modules/Promise.jsm
-> resource://gre/modules/Promise-backend.js:747:11
Any help, please?
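One possible direction (an assumption, not verified against this iMacros build): the iMacros script runs in a privileged Firefox scope, so a global defined by a script injected into the page can be hidden behind the Xray wrapper and only reachable through window.wrappedJSObject. A minimal sketch of that idea, reusing initFingerprintJS from the question:

function initFingerprintJS() {
  // Assumption: in a privileged scope, the page's FingerprintJS global is only
  // visible on the unwrapped window; fall back to the wrapped window otherwise.
  const FP = (window.wrappedJSObject && window.wrappedJSObject.FingerprintJS)
    || window.FingerprintJS;
  FP.load()
    .then(fp => fp.get())
    .then(result => {
      window.console.log(result.visitorId);
    });
}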

Related

Broadcasting to all clients with Deno websocket

I want to add notifications to an application I've developed.
Unfortunately, Deno has removed the ws package (https://deno.land/std@0.110.0/ws/mod.ts).
That's why I'm using the WebSocket support built into Deno itself. Since it doesn't have many functions, I have to add some things myself, for example sending messages to all open clients.
What I want to do: when the PDF is created, a (data, message) pair comes from the socket, and I update the notifications on the page according to the incoming data.
I keep all open clients in a Map, and when the PDF is created I iterate over this Map and send (data, message) to every socket.
However, this only works once.
server conf...
import {
path,
paths,
ctid,
} from "../deps.ts";
const users = new Map();
const sockets = new Map()
const userArr = [];
export const startNotif = (socket,req) => {
const claims = req.get("claims");
const org = req.get("org");
claims.org = org;
console.log("connected")
users.set(claims.sub, {"username":claims.sub,"socket":socket})
users.forEach((user)=>{
if(userArr.length === 0){
userArr.push(user)
}
else if(userArr.every((w)=> w.username !== user.username) )
userArr.push(user)
})
sockets.set(org, userArr)
function broadcastMessage(message) {
sockets.get(org).map((u)=>{
console.log(u.socket.readyState)
u.socket.send(message)
})
}
if (socket.readyState === 3) {
sockets.delete(uid)
return
}
const init = (msg) => {
socket.send(
JSON.stringify({
status: "creating",
})
);
};
const ondata = async (msg) => {
const upfilepath = path.join(paths.work, `CT_${msg.sid}_report.pdf`);
try {
const s=await Deno.readTextFile(upfilepath);
if(s){
socket.send(
JSON.stringify({
status: "end",
})
);
} else {
socket.send(
JSON.stringify({
status: "creating",
})
);
}
} catch(e) {
if(e instanceof Deno.errors.NotFound)
console.error('file does not exists');
}
};
const end = () => {
try {
const endTime = Date.now()
const msg = "Your PDF has been created"
const id = ctid(12) // random id create
broadcastMessage(
JSON.stringify({
id: id,
date: endTime,
status: "done",
message: msg,
read: 'negative',
action: 'pdf'
})
);
} catch (e) {
console.log(400, "Cannot send.", e);
}
}
socket.onmessage = async (e) => {
const cmd = JSON.parse(e.data);
if(cmd.bid === 'start'){
await init(cmd)
}
if(!cmd.bid && cmd.sid){
await ondata(cmd)
}
if(cmd.bid === 'end'){
await end();
}
}
socket.onerror = (e) => {
console.log(e);
};
}
client conf...
export const webSocketHandler = (request) =>
new Promise((res, rej) => {
let url;
if (!process.env.NODE_ENV || process.env.NODE_ENV === 'development') {
url = `http://localhost:8080/api/notifications/ws`.replace('http', 'ws');
} else {
url = `${window.location.origin}/api/notifications/ws`.replace('http', 'ws');
}
const token = JSON.parse(sessionStorage.getItem('token'));
const orgname = localStorage.getItem('orgname');
const protocol = `${token}_org_${orgname}`;
const socket = new WebSocket(url, protocol);
const response = Object.create({});
socket.onopen = function () {
socket.send(
JSON.stringify({
bid: 'start',
})
);
};
socket.onmessage = function (event) {
response.data = JSON.parse(event.data);
if (response.data.status === 'creating') {
socket.send(
JSON.stringify({
sid: request.sid,
})
);
} else if (response.data.status === 'end') {
socket.send(
JSON.stringify({
bid: 'end',
})
);
} else if (response.data.status === 'done') {
try {
res(response);
} catch (err) {
rej(err);
}
}
};
socket.onclose = function (event) {
response.state = event.returnValue;
};
socket.onerror = function (error) {
rej(error);
};
});
The onclick handler of the button I use in the component...
const donwloadReport = async (type) => {
const query = `?sid=${sid}&reportType=${type}`;
const fileName = `CT_${sid}_report.${type}`;
try {
type === 'pdf' && setLoading(true);
const response = await getScanReportAction(query);
const request = {
sid,
};
webSocketHandler(request)
.then((data) => {
console.log(data);
dispatch({
type: 'update',
data: {
id: data.data.id,
date: data.data.date,
message: data.data.message,
action: data.data.action,
read: data.data.read,
},
});
})
.catch((err) => {
console.log(err);
});
if (type === 'html') {
downloadText(response.data, fileName);
} else {
const blobUrl = await readStream(response.data);
setLoading(false);
downloadURL(blobUrl, fileName);
}
} catch (err) {
displayMessage(err.message);
}
};
Everything works perfectly the first time. When I press the download button for the PDF, the socket works, data is returned, and I update the notification count via the context I applied, according to that data.
Later I realized that this only works in a single tab. When I open a new client in another tab, its notification count does not increase. For this, I wanted to keep all sockets in a Map, iterate over them all, and send a message to each socket separately. But in that case, when I press the download button a second time, no data comes from the socket.
Actually, I think I should do the socket initialization on the client inside the context, but when I do that, the socket is started twice for no reason.
In summary, consider an application with organizations and users belonging to those organizations. If the clients of users A, B, and C, who belong to organization X, are open at the same time and user A presses the PDF download button, I want users A, B, and C to be notified when the PDF is ready.
I would be very grateful if someone could show me a way around this issue.
Have you looked at the BroadcastChannel API? Maybe that could solve your issue. See for example:
Deno specific: https://medium.com/deno-the-complete-reference/broadcast-channel-in-deno-f76a0b8893f5
Web/Browser API: https://developer.mozilla.org/en-US/docs/Web/API/Broadcast_Channel_API
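For illustration, a minimal sketch of the BroadcastChannel idea on the server side (assumptions: BroadcastChannel is available in your Deno runtime, e.g. on Deno Deploy, while locally it may require an unstable flag; localSockets, register, notifyAll, fanOut and the channel name are hypothetical names, not from the question):

// Each server instance keeps the sockets connected to it and relays
// cross-instance messages through a BroadcastChannel.
const channel = new BroadcastChannel("notifications");
const localSockets = new Set();

export function register(socket) {
  localSockets.add(socket);
  socket.onclose = () => localSockets.delete(socket);
}

// Called when the PDF is ready: notify every instance, including this one.
export function notifyAll(payload) {
  const message = JSON.stringify(payload);
  channel.postMessage(message); // clients connected to other instances
  fanOut(message);              // clients connected to this instance
}

channel.onmessage = (event) => fanOut(event.data);

function fanOut(message) {
  for (const socket of localSockets) {
    if (socket.readyState === WebSocket.OPEN) socket.send(message);
  }
}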

Mock server error - The script has an unsupported MIME type ('text/html')

When I run my app I get this error:
I'm using msw version 0.39.2.
My msw worker file is in the public folder, and this is its content:
/* eslint-disable */
/* tslint:disable */
/**
* Mock Service Worker (0.39.2).
* @see https://github.com/mswjs/msw
* - Please do NOT modify this file.
* - Please do NOT serve this file on production.
*/
const INTEGRITY_CHECKSUM = '*****'
const bypassHeaderName = '*****'
const activeClientIds = new Set()
self.addEventListener('install', function () {
return self.skipWaiting()
})
self.addEventListener('activate', async function (event) {
return self.clients.claim()
})
self.addEventListener('message', async function (event) {
const clientId = event.source.id
if (!clientId || !self.clients) {
return
}
const client = await self.clients.get(clientId)
if (!client) {
return
}
const allClients = await self.clients.matchAll()
switch (event.data) {
case 'KEEPALIVE_REQUEST': {
sendToClient(client, {
type: 'KEEPALIVE_RESPONSE',
})
break
}
case 'INTEGRITY_CHECK_REQUEST': {
sendToClient(client, {
type: 'INTEGRITY_CHECK_RESPONSE',
payload: INTEGRITY_CHECKSUM,
})
break
}
case 'MOCK_ACTIVATE': {
activeClientIds.add(clientId)
sendToClient(client, {
type: 'MOCKING_ENABLED',
payload: true,
})
break
}
case 'MOCK_DEACTIVATE': {
activeClientIds.delete(clientId)
break
}
case 'CLIENT_CLOSED': {
activeClientIds.delete(clientId)
const remainingClients = allClients.filter((client) => {
return client.id !== clientId
})
// Unregister itself when there are no more clients
if (remainingClients.length === 0) {
self.registration.unregister()
}
break
}
}
})
// Resolve the "main" client for the given event.
// Client that issues a request doesn't necessarily equal the client
// that registered the worker. It's with the latter the worker should
// communicate with during the response resolving phase.
async function resolveMainClient(event) {
const client = await self.clients.get(event.clientId)
if (client.frameType === 'top-level') {
return client
}
const allClients = await self.clients.matchAll()
return allClients
.filter((client) => {
// Get only those clients that are currently visible.
return client.visibilityState === 'visible'
})
.find((client) => {
// Find the client ID that's recorded in the
// set of clients that have registered the worker.
return activeClientIds.has(client.id)
})
}
async function handleRequest(event, requestId) {
const client = await resolveMainClient(event)
const response = await getResponse(event, client, requestId)
// Send back the response clone for the "response:*" life-cycle events.
// Ensure MSW is active and ready to handle the message, otherwise
// this message will pend indefinitely.
if (client && activeClientIds.has(client.id)) {
;(async function () {
const clonedResponse = response.clone()
sendToClient(client, {
type: 'RESPONSE',
payload: {
requestId,
type: clonedResponse.type,
ok: clonedResponse.ok,
status: clonedResponse.status,
statusText: clonedResponse.statusText,
body:
clonedResponse.body === null ? null : await clonedResponse.text(),
headers: serializeHeaders(clonedResponse.headers),
redirected: clonedResponse.redirected,
},
})
})()
}
return response
}
async function getResponse(event, client, requestId) {
const { request } = event
const requestClone = request.clone()
const getOriginalResponse = () => fetch(requestClone)
// Bypass mocking when the request client is not active.
if (!client) {
return getOriginalResponse()
}
// Bypass initial page load requests (i.e. static assets).
// The absence of the immediate/parent client in the map of the active clients
// means that MSW hasn't dispatched the "MOCK_ACTIVATE" event yet
// and is not ready to handle requests.
if (!activeClientIds.has(client.id)) {
return await getOriginalResponse()
}
// Bypass requests with the explicit bypass header
if (requestClone.headers.get(bypassHeaderName) === 'true') {
const cleanRequestHeaders = serializeHeaders(requestClone.headers)
// Remove the bypass header to comply with the CORS preflight check.
delete cleanRequestHeaders[bypassHeaderName]
const originalRequest = new Request(requestClone, {
headers: new Headers(cleanRequestHeaders),
})
return fetch(originalRequest)
}
// Send the request to the client-side MSW.
const reqHeaders = serializeHeaders(request.headers)
const body = await request.text()
const clientMessage = await sendToClient(client, {
type: 'REQUEST',
payload: {
id: requestId,
url: request.url,
method: request.method,
headers: reqHeaders,
cache: request.cache,
mode: request.mode,
credentials: request.credentials,
destination: request.destination,
integrity: request.integrity,
redirect: request.redirect,
referrer: request.referrer,
referrerPolicy: request.referrerPolicy,
body,
bodyUsed: request.bodyUsed,
keepalive: request.keepalive,
},
})
switch (clientMessage.type) {
case 'MOCK_SUCCESS': {
return delayPromise(
() => respondWithMock(clientMessage),
clientMessage.payload.delay,
)
}
case 'MOCK_NOT_FOUND': {
return getOriginalResponse()
}
case 'NETWORK_ERROR': {
const { name, message } = clientMessage.payload
const networkError = new Error(message)
networkError.name = name
// Rejecting a request Promise emulates a network error.
throw networkError
}
case 'INTERNAL_ERROR': {
const parsedBody = JSON.parse(clientMessage.payload.body)
console.error(
`\
[MSW] Uncaught exception in the request handler for "%s %s":
${parsedBody.location}
This exception has been gracefully handled as a 500 response, however, it's strongly recommended to resolve this error, as it indicates a mistake in your code. If you wish to mock an error response, please see this guide: https://mswjs.io/docs/recipes/mocking-error-responses\
`,
request.method,
request.url,
)
return respondWithMock(clientMessage)
}
}
return getOriginalResponse()
}
self.addEventListener('fetch', function (event) {
const { request } = event
const accept = request.headers.get('accept') || ''
// Bypass server-sent events.
if (accept.includes('text/event-stream')) {
return
}
// Bypass navigation requests.
if (request.mode === 'navigate') {
return
}
// Opening the DevTools triggers the "only-if-cached" request
// that cannot be handled by the worker. Bypass such requests.
if (request.cache === 'only-if-cached' && request.mode !== 'same-origin') {
return
}
// Bypass all requests when there are no active clients.
// Prevents the self-unregistered worked from handling requests
// after it's been deleted (still remains active until the next reload).
if (activeClientIds.size === 0) {
return
}
const requestId = uuidv4()
return event.respondWith(
handleRequest(event, requestId).catch((error) => {
if (error.name === 'NetworkError') {
console.warn(
'[MSW] Successfully emulated a network error for the "%s %s" request.',
request.method,
request.url,
)
return
}
// At this point, any exception indicates an issue with the original request/response.
console.error(
`\
[MSW] Caught an exception from the "%s %s" request (%s). This is probably not a problem with Mock Service Worker. There is likely an additional logging output above.`,
request.method,
request.url,
`${error.name}: ${error.message}`,
)
}),
)
})
function serializeHeaders(headers) {
const reqHeaders = {}
headers.forEach((value, name) => {
reqHeaders[name] = reqHeaders[name]
? [].concat(reqHeaders[name]).concat(value)
: value
})
return reqHeaders
}
function sendToClient(client, message) {
return new Promise((resolve, reject) => {
const channel = new MessageChannel()
channel.port1.onmessage = (event) => {
if (event.data && event.data.error) {
return reject(event.data.error)
}
resolve(event.data)
}
client.postMessage(JSON.stringify(message), [channel.port2])
})
}
function delayPromise(cb, duration) {
return new Promise((resolve) => {
setTimeout(() => resolve(cb()), duration)
})
}
function respondWithMock(clientMessage) {
return new Response(clientMessage.payload.body, {
...clientMessage.payload,
headers: clientMessage.payload.headers,
})
}
function uuidv4() {
return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, function (c) {
const r = (Math.random() * 16) | 0
const v = c == 'x' ? r : (r & 0x3) | 0x8
return v.toString(16)
})
}
Any help?
The problem was in my package.json.
When you add a homepage to package.json, msw doesn't know the URL.
I removed the homepage and restarted my app.
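If you need to keep the homepage field, a sketch of another option is to point MSW at the worker script explicitly when starting it (the serviceWorker.url option is part of the msw API; the file paths below are assumptions about a typical CRA-style setup):

// src/mocks/browser.js (hypothetical location)
import { setupWorker } from 'msw';
import { handlers } from './handlers';

export const worker = setupWorker(...handlers);

// Register the worker from an explicit URL so a custom homepage/base path
// doesn't make the browser fetch an HTML 404 page instead of the script.
worker.start({
  serviceWorker: {
    url: '/mockServiceWorker.js', // adjust if the app is served from a sub-path
  },
});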
The issue is your web server: it's returning a Content-Type of 'text/html'. I bet you have misspelled the name of the script and it's sending back an HTML 404 page.

expo-file-system .downloadAsync leads to half-gray corrupted images

IMAGE OF THE PROBLEM: This is part of a custom CachedImage component. It will sometimes render a corrupted image on first load when there is no previous cache, i.e. when the image is first downloaded. As far as I know this happens only on Android and could be related to internet speed, but I'm not sure about that. The code is:
import React, { useState, useEffect, useRef } from 'react'
import { Image } from 'react-native'
import * as FileSystem from 'expo-file-system'
import PropTypes from 'prop-types'
const NoImage = require('../../assets/NoImage.png');
const CachedImage = props => {
const { source: { uri }, cacheKey } = props
let filesystemURI = `${FileSystem.cacheDirectory}${cacheKey}`
const [imgSOURCE, setImgSOURCE] = useState({ uri: filesystemURI })
const sleep = ms => new Promise(resolve => setTimeout(resolve, ms));
const componentIsMounted = useRef(true)
useEffect(() => {
componentIsMounted.current = true;
if (componentIsMounted.current) {
setImgSOURCE({ uri: filesystemURI })
}
return () => {
componentIsMounted.current = false
}
}, [cacheKey])
useEffect(() => {
filesystemURI = `${FileSystem.cacheDirectory}${cacheKey}`
const loadImage = async ({ fileURI }) => {
try {
// Use the cached image if it exists
const metadata = await FileSystem.getInfoAsync(fileURI).catch(function (error) {
console.log('There has been a problem with the getInfo operation: ' + error.message);
// ADD THIS THROW error
throw error;
});
if (!metadata.exists) {
// download to cache
if (componentIsMounted.current) {
setImgSOURCE(NoImage)
FileSystem.downloadAsync(
uri,
fileURI
).then(({ uri, status, headers }) => {
if (componentIsMounted.current) {
//sleep(100);
setImgSOURCE({ uri: uri })
}
})
.catch(function (error) {
console.log('There has been a problem with your fetch operation: ' + error.message);
// ADD THIS THROW error
throw error;
});
}
}
} catch (err) {
console.log('cache image error:' + err)
if (componentIsMounted.current) {
setImgSOURCE({ uri: uri })
}
}
}
loadImage({ fileURI: filesystemURI })
return () => {
componentIsMounted.current = false
}
}, [cacheKey])
return (
<Image
{...props}
onError={() => { setImgSOURCE(NoImage) }}
source={imgSOURCE}
/>
)
}
CachedImage.propTypes = {
source: PropTypes.object.isRequired,
cacheKey: PropTypes.string.isRequired,
}
export default CachedImage
I have added a commented-out sleep before the setState for the image but haven't properly tested it. expo-file-system version is 13.1.3. It has also happened to other people:
React-native expo FileSystem.downloadAsync corrupted images
We face the same issue with Expo SDK 45 and expo-file-system 14.0.0. I have the following logic in place (see code snippet) and can confirm that onError is never triggered, yet we still sometimes see a partial image instead of 'missing.png'. My current assumption is that this is an image-caching rather than an image-loading issue. Usually the image displays fine after a refresh of the screen.
import imgNone from '../../assets/images/missing.png';
const [imgSrc, setImgSrc] = useState(imageSrc(element.media[0]))
<View>
<Image key={element.media[0]} source={imgSrc} onError={() => setImgSrc(imgNone)} alt={i18n.t('global.missingImage')} size='sm'/>
</View>
Forgot to mention: I did some fiddling with image width and height and was able to display 'old' versions of the image after the image was reloaded. That also led to the image-caching assumption.
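One thing that might be worth trying, as a sketch rather than a confirmed fix: validate the result of downloadAsync before swapping the image source, and delete suspicious files so the next mount re-downloads them. downloadAsync resolves with an HTTP status and getInfoAsync can report the file size; the 1 KB threshold and the downloadAndVerify name are assumptions.

// Hypothetical helper: only accept the cached file if the download looks complete.
const downloadAndVerify = async (uri, fileUri) => {
  const { status } = await FileSystem.downloadAsync(uri, fileUri);
  const info = await FileSystem.getInfoAsync(fileUri, { size: true });
  // Assumption: anything under ~1 KB is a truncated or failed download for these images.
  if (status !== 200 || !info.exists || (info.size ?? 0) < 1024) {
    await FileSystem.deleteAsync(fileUri, { idempotent: true });
    return null;
  }
  return fileUri;
};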

How to read multiple json file using fs and bulk request

I'm using the Elasticsearch search engine with my React app. I was reading one file on the backend, as you can see in the code, and it works perfectly, but now I want to read three different JSON files into three different indexes using the fs package and a bulk request. Can you please help me?
The code:
// Start reading the json file
fs.readFile("DocRes.json", { encoding: "utf-8" }, function (err, data) {
if (err) {
throw err;
}
// Build up a giant bulk request for elasticsearch.
bulk_request = data.split("\n").reduce(function (bulk_request, line) {
var obj, ncar;
try {
obj = JSON.parse(line);
} catch (e) {
console.log("Done reading 1");
return bulk_request;
}
// Rework the data slightly
ncar = {
id: obj.id,
name: obj.name,
summary: obj.summary,
image: obj.image,
approvetool: obj.approvetool,
num: obj.num,
date: obj.date,
};
bulk_request.push({
index: { _index: "ncar_index", _type: "ncar", _id: ncar.id },
});
bulk_request.push(ncar);
return bulk_request;
}, []);
// A little voodoo to simulate synchronous insert
var busy = false;
var callback = function (err, resp) {
if (err) {
console.log(err);
}
busy = false;
};
// Recursively whittle away at bulk_request, 1000 at a time.
var perhaps_insert = function () {
if (!busy) {
busy = true;
client.bulk(
{
body: bulk_request.slice(0, 1000),
},
callback
);
bulk_request = bulk_request.slice(1000);
console.log(bulk_request.length);
}
if (bulk_request.length > 0) {
setTimeout(perhaps_insert, 100);
} else {
console.log("Inserted all records.");
}
};
perhaps_insert();
});
You can create a promise for each file read and feed the results into the Elasticsearch bulk request.
const fsPromises = require('fs').promises;
const path = require('path');

const files = ['filename1', 'filename2'];
const results = [];

const fetchFile = async (filename) => {
  const filePath = path.join(__dirname, filename); // make sure the path is correct
  return fsPromises.readFile(filePath, 'utf-8');
};

files.forEach((fileName) => results.push(fetchFile(fileName)));

Promise.all(results)
  .then((data) => console.log(data))
  .catch((e) => console.log(e));
Once you get the data from all the promises, pass it to Elasticsearch.
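A minimal sketch of the full flow under the question's assumptions: each file contains newline-delimited JSON, client is your existing Elasticsearch client with a bulk method, and the extra file and index names are placeholders.

const path = require('path');
const fsPromises = require('fs').promises;

// Hypothetical mapping of files to target indexes; adjust names to your data.
const sources = [
  { file: 'DocRes.json', index: 'ncar_index' },
  { file: 'DocRes2.json', index: 'second_index' },
  { file: 'DocRes3.json', index: 'third_index' },
];

async function indexAll(client) {
  for (const { file, index } of sources) {
    const text = await fsPromises.readFile(path.join(__dirname, file), 'utf-8');
    const body = [];
    for (const line of text.split('\n')) {
      let obj;
      try {
        obj = JSON.parse(line);
      } catch (e) {
        continue; // skip blank or malformed lines
      }
      body.push({ index: { _index: index, _id: obj.id } });
      body.push(obj);
    }
    // One bulk call per file; for large files, slice `body` into chunks
    // (e.g. 1000 entries at a time) like the original code does.
    await client.bulk({ body });
  }
}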

Angular 2 Synchronous File Upload

I am trying to upload a file to a Web API that takes the file as a byte array, from an Angular 2 application.
I am not able to pass the byte array from the Angular 2 page to the Web API. It looks like the FileReader read method is asynchronous. How do I make this a synchronous call, or wait for the file content to be loaded before executing the next line of code?
Below is my code
//attachment on browse - when the browse button is clicked
//It only assign the file to a local variable (attachment)
fileChange = (event) => {
var files = event.target.files;
if (files.length > 0) {
this.attachment = files[0];
}
}
//when the submit button is clicked
onSubmit = () => {
//Read the content of the file and store it in local variable (fileData)
let fr = new FileReader();
let data = new Blob([this.attachment]);
fr.readAsArrayBuffer(data);
fr.onloadend = () => {
this.fileData = fr.result; //Note : This always "undefined"
};
//build the attachment object which will be sent to Web API
let attachment: Attachment = {
AttachmentId: '0',
FileName: this.form.controls["attachmentName"].value,
FileData: this.fileData
}
//build the purchase order object
let order: UpdatePurchaseOrder = {
SendEmail: true,
PurchaseOrderNumber: this.form.controls["purchaseOrderNumber"].value,
Attachment: attachment
}
//call the web api and pass the purchaseorder object
this.updatePoService
.updatePurchaseOrder(this.form.controls["purchaseOrderRequestId"].value, order)
.subscribe(data => {
if (data) {
this.saveSuccess = true;
}
else {
this.saveSuccess = false;
}
},
error => this.errors = error,
() => this.res = 'Completed'
);
}
Any hint would be useful.
regards,
-Alan-
You cannot make this async call synchronous, but you can take advantage of observables to wait for the file to be read:
//when the submit button is clicked
onSubmit = () => {
let file = Observable.create((observer) => {
let fr = new FileReader();
let data = new Blob([this.attachment]);
fr.readAsArrayBuffer(data);
fr.onloadend = () => {
observer.next(fr.result);
observer.complete()
};
fr.onerror = (err) => {
observer.error(err)
}
fr.onabort = () => {
observer.error("aborted")
}
});
file.map((fileData) => {
//build the attachment object which will be sent to Web API
let attachment: Attachment = {
AttachmentId: '0',
FileName: this.form.controls["attachmentName"].value,
FileData: fileData
}
//build the purchase order object
let order: UpdatePurchaseOrder = {
SendEmail: true,
PurchaseOrderNumber: this.form.controls["purchaseOrderNumber"].value,
Attachment: attachment
}
return order;
})
.switchMap(order => this.updatePoService.updatePurchaseOrder(this.form.controls["purchaseOrderRequestId"].value, order))
.subscribe(data => {
if (data) {
this.saveSuccess = true;
} else {
this.saveSuccess = false;
}
},
error => this.errors = error,
() => this.res = 'Completed'
);
}
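If you would rather avoid observables here, a minimal promise-based sketch of the same idea (readFileAsArrayBuffer is a hypothetical helper, not part of the original code):

const readFileAsArrayBuffer = (file) =>
  new Promise((resolve, reject) => {
    const reader = new FileReader();
    reader.onloadend = () => resolve(reader.result);
    reader.onerror = () => reject(reader.error);
    reader.readAsArrayBuffer(file);
  });

// Usage inside an async onSubmit: wait for the bytes before building the attachment.
// const fileData = await readFileAsArrayBuffer(this.attachment);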
I arrived here looking for a solution to a similar issue. I'm performing requests to an endpoint which can respond with a binary blob if everything goes well, or with a JSON file in the event of an error.
this.httpClient.post(urlService, bodyRequest,
{responseType: 'blob', headers: headers})
.pipe(map((response: Response) => response),
catchError((err: Error | HttpErrorResponse) => {
if (err instanceof HttpErrorResponse) {
// here, err.error is a BLOB containing a JSON String with the error message
} else {
return throwError(ErrorDataService.overLoadError(err, message));
}
}));
As FileReaderSync apparently doesn't work in Angular 6, I took n00dl3's solution (above) to throw the error after parsing the Blob content:
return this.httpClient.post(urlService, bodyRequest,
{responseType: 'blob', headers: headers})
.pipe(map((response: Response) => response),
catchError((err: Error | HttpErrorResponse) => {
const message = `In TtsService.getTts(${locale},${outputFormat}). ${err.message}`;
if (err instanceof HttpErrorResponse) {
const $errBlobReader: Observable<HttpErrorResponse> = Observable.create((observer) => {
const fr = new FileReader();
const errorBlob = err.error;
fr.readAsText(errorBlob, 'utf8');
fr.onloadend = () => {
const errMsg = JSON.parse(fr.result).message;
const msg = `In TtsService.getTts(${locale},${outputFormat}). ${errMsg}`;
observer.error(ErrorDataService.overLoadError(err, msg));
};
fr.onerror = (blobReadError) => {
observer.error(blobReadError);
};
fr.onabort = () => {
observer.error('aborted');
};
});
return $errBlobReader;
} else {
return throwError(ErrorDataService.overLoadError(err, message));
}
}));
Thanks! You really saved my day!
