I've got a problem with the sign-up request on my local machine. When I set up the project on my local machine with Docker and tried to sign up against my local server (localhost:3000), I got a 401 Unauthorized error, but I don't get this error when I git clone the project from GitHub and run npm run dev. It also works fine when the project runs on my web server on the internet.
Tokens should be saved in local storage, but there are no tokens at all.
I tried to set {withCredentials: true} but it didn't work too.
//storage.js
// Thin wrapper around the browser's localStorage.
const storage = {
  get(key) {
    return localStorage.getItem(key)
  },
}

// Read the persisted access token (null when nothing was ever stored).
export const getAccessToken = () => storage.get('accessToken')
//authApi.js
import {$api_id} from '../api'
// Authentication endpoints, all routed through the shared $api_id axios instance.
export const AuthApi = {
  // POST /auth/signup — register a new account; resolves with the axios response.
  signup: (payload) => $api_id.post('/auth/signup', payload),
  // POST /auth/signin — authenticate an existing account.
  signin: (payload) => $api_id.post('/auth/signin', payload),
}
//api.js
import axios, {AxiosResponse} from 'axios'
import {getAccessToken, getDeviceId} from 'utils'
// Base URL for the API (note: NEXT_PUBLIC_API_KEY actually holds a URL,
// not a key — consider renaming the env var).
export const API_URL = process.env.NEXT_PUBLIC_API_KEY

// BUG FIX: the original assigned `$api_id.baseURL` *before* the const was
// declared (a TDZ ReferenceError) and passed an undefined `API_ID_URL` to
// axios.create. Configure the instance once, at creation time.
export const $api_id = axios.create({
  withCredentials: false,
  baseURL: API_URL,
})

$api_id.interceptors.request.use((config) => {
  // Only attach the Authorization header when a token actually exists.
  // Sending "Authorization: Bearer null" is what made the server reject
  // the signup request with 401 Unauthorized.
  const token = getAccessToken()
  if (token) {
    config.headers.Authorization = `Bearer ${token}`
  }

  if (config.data && config.data.avatar) {
    // Avatar upload: repackage the payload as multipart/form-data.
    const formData = new FormData()
    formData.append('deviceId', getDeviceId()) // deviceId comes from fingerprintjs
    Object.entries(config.data).forEach(([name, value]) => {
      formData.append(name, value)
    })
    config.data = formData
  } else if (config.data) {
    // JSON payload: make sure a deviceId is always present.
    const data = typeof config.data === 'string' ? JSON.parse(config.data) : config.data
    if (!data.deviceId) {
      data.deviceId = getDeviceId()
    }
    // BUG FIX: when config.data was a string, the parsed copy (and the
    // injected deviceId) was silently discarded — write it back.
    config.data = data
  }
  // BUG FIX: a request interceptor must return the config; the pasted
  // snippet was truncated and never did.
  return config
})
Thanks!
Related
In my code, when I try to call an external endpoint from the Jira issue panel app, I get a log "try" and the fetch doesn't happen.
I would appreciate it, if someone could help me.
Thanks in advance!
import ForgeUI, {
IssuePanel,
useProductContext,
useEffect,
} from "#forge/ui";
import { url } from "./constants";
import API, { fetch } from "#forge/api";
// Forge UI issue-panel component (NOTE: snippet is truncated — the JSX
// return and the closing brace are missing from the post).
const Panel = () => {
const productContext = useProductContext();
// Fetch the task items for one issue from the external API.
const fetchTaskItems = async (issueID) => {
const requestOptions = {
method: "GET",
mode: "cors",
headers: { "Content-Type": "application/json" },
};
try {
console.log("try");
const res = await fetch(
url + `experience/task/item/${issueID}`,
requestOptions
);
console.log(res);
} catch (err) {
console.log(err.message);
}
};
// NOTE(review): "try" logs but the fetch never appears to run — presumably
// the remote host must be allow-listed under the app's external fetch
// permissions in the manifest; confirm before debugging further.
useEffect(() => {
const currentIssueId = productContext.platformContext.issueKey;
console.log("Current issue ID:", currentIssueId);
fetchTaskItems(currentIssueId);
}, []);
You can import useAction from ForgeUI
import ForgeUI, {useAction} from "#forge/ui";
Then call your fetchTaskItems like this
// useAction gives UI-kit components an async trigger; the first tuple
// element holds the resolved value of the async initializer.
const [req] = useAction(
(value) => value,
async () => await fetchTaskItems()
);
let me know if it works,
I have an issue with sign up.
When I try to sign up, server responses with 401 Unauthorized
This problem happens only when I start this project on localhost using docker. If I git clone project on my localhost - it works, but when I use docker - it doesn't
Request:
POST /auth/signup HTTP/1.1
Authorization: Bearer null
Response:
HTTP/1.1 401 Unauthorized
There is code below:
//authApi.js
import {$api_id} from '../api'
// Authentication endpoints, all routed through the shared $api_id axios instance.
export const AuthApi = {
  // POST /auth/signup — register a new account; resolves with the axios response.
  signup: (payload) => $api_id.post('/auth/signup', payload),
  // POST /auth/signin — authenticate an existing account.
  signin: (payload) => $api_id.post('/auth/signin', payload),
}
//storage.js
// Thin wrapper around the browser's localStorage.
const storage = {
  get(key) {
    return localStorage.getItem(key)
  },
}

// Read the persisted access token (null when nothing was ever stored).
export const getAccessToken = () => storage.get('accessToken')
//api.js
import axios, {AxiosResponse} from 'axios'
import {getAccessToken, getDeviceId} from 'utils'
// Base URL for the API (note: NEXT_PUBLIC_API_KEY actually holds a URL,
// not a key — consider renaming the env var).
export const API_URL = process.env.NEXT_PUBLIC_API_KEY

// BUG FIX: the original assigned `$api_id.baseURL` *before* the const was
// declared (a TDZ ReferenceError) and passed an undefined `API_ID_URL` to
// axios.create. Configure the instance once, at creation time.
export const $api_id = axios.create({
  withCredentials: false,
  baseURL: API_URL,
})

$api_id.interceptors.request.use((config) => {
  // Only attach the Authorization header when a token actually exists.
  // Sending "Authorization: Bearer null" — visible in the request trace
  // above — is what made the server answer 401 Unauthorized on signup.
  const token = getAccessToken()
  if (token) {
    config.headers.Authorization = `Bearer ${token}`
  }

  if (config.data && config.data.avatar) {
    // Avatar upload: repackage the payload as multipart/form-data.
    const formData = new FormData()
    formData.append('deviceId', getDeviceId()) // deviceId comes from fingerprintjs
    Object.entries(config.data).forEach(([name, value]) => {
      formData.append(name, value)
    })
    config.data = formData
  } else if (config.data) {
    // JSON payload: make sure a deviceId is always present.
    const data = typeof config.data === 'string' ? JSON.parse(config.data) : config.data
    if (!data.deviceId) {
      data.deviceId = getDeviceId()
    }
    // BUG FIX: when config.data was a string, the parsed copy (and the
    // injected deviceId) was silently discarded — write it back.
    config.data = data
  }
  return config
})
Thank you!
The following code constructs a redis client and exports it. I am fetching the redis password from the Vault secret-management service, and that call is a promise/async. The code doesn't wait for that call and exports the redis client before the async call completes. I am not sure what I am doing wrong here. Any idea?
import redis from 'redis';
import bluebird from 'bluebird';
import logger from '../logger';
import srvconf from '../srvconf';
import { getVaultSecret } from '../services/vault.service';
const vaultConfig = srvconf.get('vault');
bluebird.promisifyAll(redis);
let redisUrl = '';

// Redact the password portion of a redis URL before it reaches the logs.
const maskRedisUrl = (url) => url.replace(/password=.*/, 'password=*****');

// Build a redis connection URL; the ?password query string is only
// appended when a non-empty password is supplied. Logs the masked URL.
const setRedisUrl = (host, port, pw) => {
  let url = `redis://${host}:${port}`;
  if (pw) {
    url += `?password=${pw}`;
  }
  console.log(`Setting redis_url to '${maskRedisUrl(url)}'`);
  return url;
};
// RACE (the reported bug): this async IIFE is not awaited, so execution
// falls straight through to redis.createClient below before the vault
// secret — and therefore redisUrl — is available.
if (vaultConfig.use_vault) {
(async () => {
const secret = await getVaultSecret(`${vaultConfig.redis.secrets_path + vaultConfig.redis.key}`)
redisUrl = setRedisUrl(srvconf.get('redis_host'), srvconf.get('redis_port'), secret.PASSWORD);
})().catch(err => console.log(err));
} else {
// Synchronous branch: config values are available immediately.
if (!srvconf.get('redis_url')) {
redisUrl = setRedisUrl(srvconf.get('redis_host'), srvconf.get('redis_port'), srvconf.get('redis_password'));;
} else {
redisUrl = srvconf.get('redis_url');
console.log(`Found redis_url ${maskRedisUrl(redisUrl)}`);
}
}
// In the vault case redisUrl is still '' at this point, so options is {}
// and the client is created without credentials.
const options = redisUrl
? { url: redisUrl }
: {};
const redisClient = redis.createClient(options);
redisClient.on('error', err => {
logger.error(err);
});
export default redisClient;
The problem is that (async () => {...})() returns a Promise and you are not awaiting it at the top-level, so the script continues to run past that line, sets options = {} and returns the redisClient.
What you need is a top-level await which is enabled by default in Node versions >= 14.8.0. However, if your project uses a version older than that, there is a workaround as shown below.
Please note that the below code is NOT tested since I do not have the same project setup locally.
Module
import redis from "redis";
import bluebird from "bluebird";
import logger from "../logger";
import srvconf from "../srvconf";
import { getVaultSecret } from "../services/vault.service";
const vaultConfig = srvconf.get("vault");
bluebird.promisifyAll(redis);
let redisUrl = "";
let redisClient = null;
// (Re)build the module-level redis client from whatever redisUrl currently
// holds, and wire its errors into the application logger.
const initRedisClient = () => {
  const options = redisUrl ? { url: redisUrl } : {};
  redisClient = redis.createClient(options);
  redisClient.on("error", (err) => logger.error(err));
};
// Redact the password portion of a redis URL before logging it.
const maskRedisUrl = (url) => url.replace(/password=.*/, "password=*****");

// Build a redis connection URL, appending ?password=… only when a
// non-empty password is supplied. Logs the masked URL.
const setRedisUrl = (host, port, pw) => {
  let url = `redis://${host}:${port}`;
  if (pw) {
    url += `?password=${pw}`;
  }
  console.log(`Setting redis_url to '${maskRedisUrl(url)}'`);
  return url;
};
// Resolve the redis connection URL (possibly from Vault) first, and only
// then create the client, so the password is never missed.
(async () => {
  if (vaultConfig.use_vault) {
    try {
      const secret = await getVaultSecret(
        `${vaultConfig.redis.secrets_path + vaultConfig.redis.key}`
      );
      redisUrl = setRedisUrl(
        srvconf.get("redis_host"),
        srvconf.get("redis_port"),
        secret.PASSWORD
      );
    } catch (err) {
      console.log(err);
    }
  } else if (!srvconf.get("redis_url")) {
    redisUrl = setRedisUrl(
      srvconf.get("redis_host"),
      srvconf.get("redis_port"),
      srvconf.get("redis_password")
    );
  } else {
    redisUrl = srvconf.get("redis_url");
    console.log(`Found redis_url ${maskRedisUrl(redisUrl)}`);
  }
  // Initialize Redis client after vault secrets are loaded
  initRedisClient();
})().catch((err) => {
  // BUG FIX: the IIFE was a floating promise — any failure inside
  // initRedisClient() would surface as an unhandled rejection.
  logger.error(err);
});

// NOTE(review): this binding is null until the async init above finishes;
// importers must check it before use (see the usage section).
export default redisClient;
Usage
At all places where you import and use the client, you always need to check if it is actually initialized successfully, and throw (and catch) a well defined error if it is not.
const redisClient = require("path/to/module");
...
if (redisClient) {
// Use it
} else {
throw new RedisClientNotInitializedError();
}
...
Hope you could help me out with the following. I am trying to upload an excel file of ≈3MB from the client side to the API by first converting the file to a DataURL where after I send it as a string. This is working for smaller files, but it somehow seems to be blocking my larger files.
When I upload the file, I get the following error.
POST body missing. Did you forget use body-parser middleware?
I have done my own research and found more people with the same problem, though I could not find a solution.
https://github.com/apollographql/apollo-server/issues/792
This is the code I am using on the server side.
import { ApolloServer, gql } from 'apollo-server-micro'
// Resolver argument shape: uploadFile receives `file`, readUpload `_id`.
type Props = {
_id: string
file: string[]
}
// Schema: the upload travels as an array of strings (DataURLs), not as a
// multipart upload — which is why body-size limits matter here.
const typeDefs = gql`
type Mutation {
uploadFile(file: [String!]!): Boolean!
}
type Query {
readUpload(_id: String!): Boolean!
}
`
const resolvers = {
  Mutation: {
    // Log the received payload and acknowledge with the Boolean! the
    // schema promises.
    async uploadFile(_: any, { file }: Props) {
      console.log(file)
      return true
    }
  },
  Query: {
    // BUG FIX: the schema declares readUpload: Boolean!, but the resolver
    // returned undefined, which Apollo rejects for a non-nullable field.
    // Placeholder result until the lookup is implemented.
    async readUpload(_: any, { _id }: Props) {
      return false
    }
  }
}
// Wire schema and resolvers into the micro-compatible Apollo server.
const apolloServer = new ApolloServer({
typeDefs,
resolvers
})
export const config = {
  api: {
    // BUG FIX (per the accepted answer below): bodyParser: false left the
    // POST body unparsed, producing "POST body missing. Did you forget use
    // body-parser middleware?". Keep the parser enabled and raise its size
    // limit so the ~3MB DataURL payload is not rejected.
    bodyParser: {
      sizeLimit: '4mb'
    }
  }
}
// Ensure to put a slash as the first character to prevent errors.
export default apolloServer.createHandler({ path: '/api/uploads' })
This is the code I am using on the client side.
import { useRef } from 'react'
import { uploadFile } from '../graphql/fetchers/uploads'
import { UPLOAD_FILE_QUERY } from '../graphql/queries/uploads'
// Upload page: reads the selected file as a DataURL and posts it to the
// GraphQL API as a string.
export default function Upload() {
  const inputElement = useRef<HTMLInputElement>(null)

  const submitForm = (event: any) => {
    event.preventDefault()
    const files = inputElement.current?.files
    if (files) {
      const fileReader = new FileReader()
      fileReader.onload = async () => {
        // BUG FIX: the original nested two identical try/catch blocks;
        // one is enough to report both read and upload failures.
        try {
          const result = fileReader.result as string
          console.log(result)
          await uploadFile(UPLOAD_FILE_QUERY, { file: result })
        } catch (error) {
          console.log(error)
        }
      }
      fileReader.readAsDataURL(files[0])
    }
  }

  return (
    <form>
      <input ref={inputElement} type='file'></input>
      <button onClick={(event) => submitForm(event)}>Submit</button>
    </form>
  )
}
// NOTE(review): this Next.js API config is exported from the *page*
// component file, but bodyParser only has effect in an API route —
// confirm whether this export belongs here at all.
export const config = {
api: {
bodyParser: false
}
}
set bodyParser to true
Set bodyParser size limit
// Raise the API route's body-size limit so multi-MB DataURL payloads pass.
export const config = {
api: {
bodyParser: {
sizeLimit: '4mb' // Set desired value here
}
}
}
Are you trying to send the file as a string in JSON? I think you should use multipart/form-data on the client side and parse it with special middleware on the server side.
On client special link converts request to multipart/formdata
full example https://github.com/jaydenseric/apollo-upload-examples
import { useMemo } from "react"
import { ApolloClient, createHttpLink, InMemoryCache } from "#apollo/client"
import { setContext } from "#apollo/client/link/context"
import { getUserTokenFromLocalStorage } from "../utils/utils"
import { createUploadLink } from "apollo-upload-client"
// Singleton client instance, memoized in the browser (see initializeApollo).
let apolloClient
// Upload-capable transport (apollo-upload-client) so File/Blob variables
// are sent as multipart/form-data instead of plain JSON.
const httpLink = createUploadLink({
uri: "/api/graphql",
headers: {
"keep-alive": "true",
},
})
// Attach the user's bearer token (when present) to every outgoing request.
const authLink = setContext((_, { headers }) => {
  const token = getUserTokenFromLocalStorage()
  const authorization = token ? `Bearer ${token}` : ""
  return {
    headers: { ...headers, authorization },
  }
})
// Server render: execute operations directly against the local schema.
// Browser: send them over the auth + upload HTTP link chain.
function createIsomorphLink() {
  if (typeof window !== "undefined") {
    return authLink.concat(httpLink)
  }
  const { SchemaLink } = require("#apollo/client/link/schema")
  const { schema } = require("./schema")
  return new SchemaLink({ schema })
}
// Build a fresh Apollo client; ssrMode is true when rendering on the server.
function createApolloClient() {
  const ssrMode = typeof window === "undefined"
  return new ApolloClient({
    ssrMode,
    link: createIsomorphLink(),
    cache: new InMemoryCache(),
  })
}
// Return an Apollo client, rehydrating its cache from initialState when
// Next.js data-fetching methods captured one.
export function initializeApollo(initialState = null) {
  const client = apolloClient ?? createApolloClient()

  // If your page has Next.js data fetching methods that use Apollo Client,
  // the initial state gets hydrated here.
  if (initialState) {
    client.cache.restore(initialState)
  }

  // For SSG and SSR always hand back a fresh, unshared client.
  if (typeof window === "undefined") {
    return client
  }

  // In the browser, memoize a single instance for the whole session.
  if (!apolloClient) {
    apolloClient = client
  }
  return client
}
// React hook: memoize the client per initialState so re-renders reuse it.
export function useApollo(initialState) {
  return useMemo(() => initializeApollo(initialState), [initialState])
}
I'm coming from React/Redux-land and am slowly getting acquainted to Svelte design patterns using stores.
Currently I'm curious to figure out if this is an acceptable pattern or if not, what is a better way to pursue this kind of communication. The basic premise is I want to be able to update multiple custom stores (which are using writable) from an adjacent store.
In the example below I have "loading.js" and "error.js" stores which would be used globally, commented out in the "session.js" store. I'd like to update these based on the result of an API request to create a session, in order to keep most of my heavy lifting outside of components.
My current thinking is that I'd pass each store needed through the "createSessionStore" function, but it feels a little clunky as it would highly depend on the declaration order of each store within "store.js"
The long term intention for wishing to do it this way is so I can add any kind of communication layer (such as web sockets) in to the mix and update the global loading or error store from any layer.
Thanks for the help.
Component.svelte
<script>
import { onMount } from "svelte";
import { error, loading, session } from "./store";
// Kick off the session fetch once the component is mounted.
onMount(() => {
session.fetchSession();
});
</script>
<!-- Global loading indicator, driven by the shared loading store. -->
{#if $loading}
<div>Loading...</div>
{/if}
<!-- Global error banner, driven by the shared error store. -->
{#if $error}
<div>Something went wrong: {$error}</div>
{/if}
store.js
import { createErrorStore } from "./error";
import { createLoadingStore } from "./loading";
import { createSessionStore } from "./session";
// Single place where every global store is instantiated; note the session
// store cannot reference error/loading at creation time.
export const error = createErrorStore();
export const loading = createLoadingStore();
export const session = createSessionStore();
session.js
import { writable } from "svelte/store";
// Default store shape: subscribers read $session.token.
const INITIAL_STORE = {
token: null
};

// Factory for the session store. Loading/error updates are left commented
// out because those sibling stores are not reachable from here.
export const createSessionStore = (initialStore = INITIAL_STORE) => {
  const { subscribe, set } = writable(initialStore);

  // POST to the auth endpoint and store the returned token.
  const fetchSession = async () => {
    // loading.set(true);
    try {
      const response = await fetch("MY_API_ENDPOINT/auth/token", {
        method: "POST",
      });
      if (!response.ok) {
        const err = new Error("Network response was not ok.");
        // error.set(err);
        // loading.set(false);
        return;
      }
      const data = await response.json();
      // BUG FIX: the store is initialised with the shape { token: null },
      // but the original called set(data.token), replacing the object with
      // a bare string and breaking any $session.token subscriber.
      set({ token: data.token });
      // loading.set(false);
    } catch (err) {
      // error.set(err);
      // loading.set(false);
    }
  };

  // Restore the store to the value it was created with.
  const reset = () => {
    set(initialStore);
  };

  return {
    subscribe,
    fetchSession,
    reset
  };
};
error.js
import { writable } from "svelte/store";
const INITIAL_STORE = false;
// Factory for a resettable error store (false = no error, or an Error).
export const createErrorStore = (initialStore = INITIAL_STORE) => {
  const { subscribe, set } = writable(initialStore);
  return {
    subscribe,
    set,
    // Drop back to the value the store was created with.
    reset: () => set(initialStore),
  };
};
loading.js
import { writable } from "svelte/store";
const INITIAL_STORE = false;
// Factory for a resettable boolean loading flag.
export const createLoadingStore = (initialStore = INITIAL_STORE) => {
  const { subscribe, set } = writable(initialStore);
  return {
    subscribe,
    set,
    // Drop back to the value the store was created with.
    reset: () => set(initialStore),
  };
};
Interesting idea.
The problem here is that during the creation of the stores, not all of them exist yet. The only solution that I see for this is to add the references after creating them.
Here's my idea:
In the session.js:
import { writable } from "svelte/store";
const INITIAL_STORE = {
token: null
};
// Session store factory with late-bound references to sibling stores,
// injected after creation via setOtherStores (they don't exist yet when
// this factory runs).
export const createSessionStore = (initialStore = INITIAL_STORE) => {
  const { subscribe, set } = writable(initialStore);

  // Sibling stores ({ error, loading }), injected after creation.
  let otherStores = {};
  const setOtherStores = (stores) => {
    otherStores = stores;
  };

  const fetchSession = async () => {
    try {
      otherStores.loading && otherStores.loading.set(true);
      const response = await fetch("MY_API_ENDPOINT/auth/token", {
        method: "POST",
      });
      if (!response.ok) {
        const err = new Error("Network response was not ok.");
        otherStores.error && otherStores.error.set(err);
        otherStores.loading && otherStores.loading.set(false);
        return;
      }
      const data = await response.json();
      set(data.token);
      // BUG FIX: the success path never cleared the loading flag, so the
      // UI would show "Loading..." forever after a successful fetch.
      otherStores.loading && otherStores.loading.set(false);
    } catch (err) {
      otherStores.error && otherStores.error.set(err);
      otherStores.loading && otherStores.loading.set(false);
    }
  };

  // Restore the store to the value it was created with.
  const reset = () => {
    set(initialStore);
  };

  return {
    subscribe,
    fetchSession,
    reset,
    setOtherStores
  };
};
In the store.js:
import { createErrorStore } from "./error";
import { createLoadingStore } from "./loading";
import { createSessionStore } from "./session";
export const error = createErrorStore();
export const loading = createLoadingStore();
export const session = createSessionStore();
// Late-bind the sibling stores now that they all exist.
session.setOtherStores({error,loading})
You can use the same pattern for any of the other stores (if needed), and after creation pass them the references to the other stores.