I have a problem creating a hook that calls a stored procedure.
My custom service hooks file (customservice.hooks.js) is:
const callstored = require('../../hooks/callstored')
module.exports = {
before: {
all: [],
find: [callstored()],
get: [],
create: [],
update: [],
patch: [],
remove: []
},
...
My service class (customservice.class.js):
const mysql = require('mysql')
class Service {
constructor (options) {
this.options = options || {};
}
find (params) {
return Promise.resolve( [] );
}
get (id, params) {
return Promise.resolve({
id, text: `A new message with ID: ${id}!`
});
}
create (data, params) {
if (Array.isArray(data)) {
return Promise.all(data.map(current => this.create(current)));
}
return Promise.resolve(data);
}
update (id, data, params) {
return Promise.resolve(data);
}
patch (id, data, params) {
return Promise.resolve(data);
}
remove (id, params) {
return Promise.resolve({ id });
}
}
module.exports = function (options) {
return new Service(options);
};
module.exports.Service = Service;
And my service (customservice.service.js):
const createService = require('./rankingvotes.class.js');
const hooks = require('./rankingvotes.hooks');
module.exports = function (app) {
const paginate = app.get('paginate');
const options = {
name: 'rankingvotes',
paginate
};
// Initialize our service with any options it requires
app.use('/rankingvotes', createService(options));
// Get our initialized service so that we can register hooks and filters
const service = app.service('rankingvotes');
service.hooks(hooks);
app.publish(() => {
});
};
And finally my hook (callstored.js):
const Sequelize = require('sequelize');
module.exports = function () {
return function (hook) {
const sequelize = hook.app.get('sequelizeClient');
let result=[];
return sequelize.query('CALL RANKING();',{
nest: true,
raw: true }).then(function(response){
console.log(response[0]) //data are correct
hook.data=response[0];
return hook;
}).catch(function(err){
console.log(err);
return hook;
});
}
}
If I check the console, the data is logged correctly, but nothing comes back when calling from a REST client like Postman.
Any idea? Thank you.
If you want to change the response, you have to set hook.result.
const Sequelize = require('sequelize');
module.exports = function () {
return async hook => {
try {
const sequelize = hook.app.get('sequelizeClient');
const response = await sequelize.query('CALL RANKING();',{
nest: true,
raw: true
});
console.log(response[0]); // data are correct
hook.result = response[0];
} catch(error) {
console.error(error);
}
return hook;
}
}
hook.data is the request data and is only available for create, update and patch.
Keep in mind that setting hook.result will skip your custom service find if it is set in a before hook.
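For illustration, here is a minimal sketch (not part of the original post) of a before hook that only queries the procedure when no result has been set yet, which makes the short-circuit behavior explicit:

// Minimal sketch: a before hook that sets hook.result only if nothing has
// produced a result yet. Once hook.result is set, Feathers skips the
// service's own find() and returns this value to the client.
module.exports = function () {
  return async hook => {
    if (hook.result !== undefined) {
      return hook;
    }
    const sequelize = hook.app.get('sequelizeClient');
    const response = await sequelize.query('CALL RANKING();', { nest: true, raw: true });
    hook.result = response[0];
    return hook;
  };
};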
I am trying to post a file object to my Nuxt 3 API route.
The problem is:
Data from the client has my file object
Data from the server returns an empty object
(Screenshot of the issue)
Where did my file object go?
const handleImageUpload = async (evt: Event) => {
const target = evt.target as HTMLInputElement
if (target.files) {
const file = target.files[0]
const upload: iUpload = {
name: file.name,
type: file.type,
file
}
console.log("data from client", upload)
try {
const { data, error } = await useFetch(constants.imageUploadApiUrl, {
headers: { "Content-type": "application/json" },
method: 'POST',
body: upload
})
console.log("data from server", data.value)
} catch (error) {
console.log(error)
}
}
}
constants.imageUploadApiUrl (the API route) has the following:
import { getQuery, readBody } from "h3"
import { iUpload } from "~~/helpers/interfaces"
export default defineEventHandler(async (event) => {
try {
const query = getQuery(event)
const body = await readBody(event) as iUpload
return { body }
} catch (error: any) {
return { error: error.message }
}
})
The iUpload interface is this:
export interface iUpload {
name: string;
type: string;
file: File;
}
I eventually got it working. For context, it's using Supabase as its backend (forgot to mention that).
But here are the changes I made.
#1 - I added a utility function to convert the file to a base64 string
export const getBase64 = (file: File) => {
return new Promise((resolve, reject) => {
const reader = new FileReader();
reader.readAsDataURL(file);
reader.onload = () => resolve(reader.result);
reader.onerror = error => reject(error);
});
}
#2 - I updated the handleImageUpload function like below. The main change is in the file key:
const handleImageUpload = async (evt: Event) => {
const target = evt.target as HTMLInputElement
if (target.files) {
const fileObj = target.files[0]
const upload: iUpload = {
path: id(memberName(store.selected), '-'),
name: fileObj.name,
file: await getBase64(fileObj) as string, // <= changed
type: fileObj.type
}
console.log("data from client", upload)
try {
const { data, error } = await useFetch(constants.imageUploadApiUrl, {
headers: { "Content-type": "multipart/form-data" },
method: 'POST',
body: upload
})
console.log("data from server", data.value)
} catch (error) {
console.log(error)
}
}
}
#3 - Furthermore I updated the server route as follows:
export default defineEventHandler(async (event) => {
try {
const body = await readBody(event) as iUpload
const filePath = `${body.path}/${body.name}`
const res = await fetch(body.file)
const blob = await res.blob()
const response = await supabase.storage
.from("pictures")
.upload(filePath, blob, {
contentType: body.type,
upsert: true,
})
return {
data: response.data,
error: response.error?.message,
}
} catch (error: any) {
return { error: error.message }
}
})
#4 - Lastly, I updated the policies on the Supabase storage bucket and storage object to the following:
(Screenshot: supabase storage policy update)
Below I try to respond with a stream when I receive ticker updates.
+page.server.js:
import YahooFinanceTicker from "yahoo-finance-ticker";
const ticker = new YahooFinanceTicker();
const tickerListener = await ticker.subscribe(["BTC-USD"])
const stream = new ReadableStream({
start(controller) {
tickerListener.on("ticker", (ticker) => {
console.log(ticker.price);
controller.enqueue(ticker.price);
});
}
});
export async function load() {
return response????
};
Note: the YahooFinanceTicker can't run in the browser.
How do I handle/set the response in the SvelteKit load function?
To my knowledge, the load functions cannot be used for this, as their responses are JS/JSON serialized. You can use an endpoint in +server.js to return a Response object, which can be constructed from a ReadableStream.
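For illustration, a bare-bones sketch of such an endpoint (hypothetical route path; the accepted solution below shows the full ticker version):

// src/routes/api/example/+server.js (path is illustrative)
// A GET handler returning a Response built directly from a ReadableStream.
export function GET() {
  const stream = new ReadableStream({
    start(controller) {
      controller.enqueue('first chunk\n');
      controller.close();
    }
  });
  return new Response(stream, {
    headers: { 'content-type': 'text/plain' }
  });
}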
Solution: H.B.'s comment showed me the right direction to push unsolicited price ticker updates to the client.
api route: yahoo-finance-ticker +server.js
import YahooFinanceTicker from "yahoo-finance-ticker";
const ticker = new YahooFinanceTicker();
const tickerListener = await ticker.subscribe(["BTC-USD"])
/** @type {import('./$types').RequestHandler} */
export function GET({ request }) {
const ac = new AbortController();
console.log("GET api: yahoo-finance-ticker")
const stream = new ReadableStream({
start(controller) {
tickerListener.on("ticker", (ticker) => {
console.log(ticker.price);
controller.enqueue(String(ticker.price));
}, { signal: ac.signal });
},
cancel() {
console.log("cancel and abort");
ac.abort();
},
})
return new Response(stream, {
headers: {
'content-type': 'text/event-stream',
}
});
}
page route: +page.svelte
<script>
let result = "";
async function getStream() {
const response = await fetch("/api/yahoo-finance-ticker");
const reader = response.body.pipeThrough(new TextDecoderStream()).getReader();
while (true) {
const { value, done } = await reader.read();
console.log("resp", done, value);
if (done) break;
result += `${value}<br>`;
}
}
getStream();
</script>
<section>
<p>{@html result}</p>
</section>
I need to set a property on DataSet during onInit to change the visibility of some controls in my View. I could solve this by setting the visibility dynamically in the controller, but I want to use expression binding and the visible property in the View to set the visibilities.
I'm getting an error in the function onStartSetVisibilities: self.getView().getBindingContext() returns undefined. Without the sPath I can't use setProperty, and without setProperty my View controls don't know the visible value.
How do I fix this?
View:
<uxap:ObjectPageSubSection visible="{= ${Responsible} === '1'}" id="leader">
</uxap:ObjectPageSubSection>
OnInit in View-Controller:
onInit: function () {
var startupParameters = this.getOwnerComponent().getComponentData().startupParameters;
var sWorkitem = startupParameters.TASK[0];
this.setModel(this.getOwnerComponent().getModel());
this.getModel().metadataLoaded().then(function () {
var sObjectPath = this.getModel().createKey("DataSet", {
Workitem: sWorkitem
});
this.getView().bindElement({
path: "/" + sObjectPath
});
}.bind(this));
var self = this;
var model = this.getOwnerComponent().getModel();
this.getModel().read("/CharSet", {
success: function (response) {
$.sap.Chars = response.results;
self.onStartSetVisibilities(model, self);
}
});
// self.getView().attachAfterRendering(function () {
// self.onStartSetVisibilities(model, self);
// });
}
OnStartSetVisibilities:
onStartSetVisibilities: function (model, self) {
var char = model.getProperty("GeneralData/Char");
if (char !== "" || char !== null) {
model.setProperty(self.getView().getBindingContext().sPath + "/Responsible",
this.getResponsibleForChar(char));
}
}
I put together some code which might solve your issue (it's untested so it may contain syntax errors!).
I introduced the concept of Promises, which simplifies handling the asynchronous behavior of JS. I also replaced some of the inner functions with arrow functions so you don't have to deal with that or self. Arrow functions basically use the this of the scope they are defined in.
Your app should now have a proper flow:
1. First you bind the view.
2. After the view is bound, you read the CharSet.
3. After the data is read, you set the visibilities.
onInit: function () {
const startupParameters = this.getOwnerComponent().getComponentData().startupParameters;
const sWorkitem = startupParameters.TASK[0];
this._bindView(sWorkitem)
.then(() => this._readCharSet())
.then(() => this._setVisibilities())
},
_bindView: function (sWorkitem) {
return new Promise((resolve) => {
const oModel = this.getOwnerComponent().getModel();
oModel.metadataLoaded().then(() => {
const sPath = "/" + oModel.createKey("DataSet", {
Workitem: sWorkitem
});
this.getView().bindElement({
path: sPath,
events: {
change: resolve,
dataReceived: resolve
}
});
});
});
},
_readCharSet: function () {
return new Promise((resolve) => {
const oModel = this.getOwnerComponent().getModel();
oModel.read("/CharSet", {
success: resolve
});
});
},
_setVisibilities: function () {
const oModel = this.getOwnerComponent().getModel();
const sChar = oModel.getProperty("GeneralData/Char");
if (sChar) {
// ...
}
}
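For completeness, a hedged sketch of what could go inside that if block, reusing the setProperty call and the getResponsibleForChar helper from the question (untested):

_setVisibilities: function () {
  const oModel = this.getOwnerComponent().getModel();
  const sChar = oModel.getProperty("GeneralData/Char");
  if (sChar) {
    // write Responsible back to the bound DataSet entry so the expression
    // binding visible="{= ${Responsible} === '1'}" can pick it up
    const sPath = this.getView().getBindingContext().getPath();
    oModel.setProperty(sPath + "/Responsible", this.getResponsibleForChar(sChar));
  }
}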
I'm trying to upload a file using apollo-server-express and apollo-client. However, when the file object is passed to the resolver it is always empty. I can see the file on the client side, but not on the server side. How can I resolve this?
My Schema
type File {
id: ID
path: String
filename: String
mimetype: String
}
extend type Query {
getFiles: [File]
}
extend type Mutation {
uploadSingleFile(file: Upload!): File
}
My Resolver
Mutation: {
uploadSingleFile: combineResolvers(
isAuthenticated,
async (parent, { file }, { models, user, storeUpload }, info) => {
console.log('Resolver-> uploadSingleFile')
console.log(file) // Will return empty, { }
const x = await file
console.log(x) // Will also return empty, { }
const storedFile = storeUpload(file)
return storedFile
}
),
},
My Client-side queries file
export const UPLOAD_SINGLE_FILE = gql`
mutation uploadSingleFile($file: Upload!) {
uploadSingleFile(file: $file) {
id
}
}
`
My Client-side interface
import React from 'react'
// GQL
import { useApolloClient, useMutation } from '@apollo/react-hooks'
import { UPLOAD_SINGLE_FILE } from '../../queries'
const FileUpload = props => {
const [uploadSingleFile, uploadSingleFileResult] = useMutation(UPLOAD_SINGLE_FILE, {
onCompleted(uploadSingleFile) {
console.log('Completed uploadSingleFile')
}
})
const apolloClient = useApolloClient()
const handleUploadFile = ({
target: {
validity,
files: [file]
}
}) => {
console.log('Uploading file...')
if(validity.valid) {
console.log('Valid')
console.log(file.name)
uploadSingleFile({ variables: { file } })
.then(() => {
apolloClient.resetStore()
})
}
else console.log('Invalid file')
}
return(
<input type="file" required onChange={handleUploadFile} />
)
}
export default FileUpload
UPDATED
My front-end set-up is:
const httpLink = createHttpLink({
uri: 'http://localhost:4000/graphql',
})
const authLink = setContext((_, { headers }) => {
const token = localStorage.getItem('token')
return {
headers: {
...headers,
authorization: token ? `Bearer ${token}` : "",
}
}
})
const client = new ApolloClient({
link: authLink.concat(httpLink),
cache: new InMemoryCache(),
})
You need to utilize the appropriate Link with your Apollo Client in order to enable file uploads. The easiest way to do that is by using createUploadLink from apollo-upload-client. It functions as a drop-in replacement for createHttpLink, so just swap out the functions and you'll be good to go.
const httpLink = createUploadLink({
uri: 'http://localhost:4000/graphql',
})
const authLink = ...
const client = new ApolloClient({
link: authLink.concat(httpLink),
cache: new InMemoryCache(),
})
Assuming you have the proper link set up and in use (using createUploadLink as Daniel mentions in his post), you should be able to destructure the props from file once you await the promise in your resolver on the server.
const { filename, mimetype, createReadStream } = await file.promise;
console.log(filename, mimetype);
// to get a stream to use of the data
const stream = createReadStream();
UPDATE: in more recent versions of graphql-upload you can just await the file like you do in the OP, rather than the file.promise. I was using an older version of the lib it seems.
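As a rough sketch of that (the ./uploads path and the write-to-disk step are just placeholders for your own storeUpload helper):

// Sketch for newer graphql-upload versions: awaiting the Upload scalar yields
// { filename, mimetype, encoding, createReadStream }.
const { createWriteStream } = require('fs');

const resolvers = {
  Mutation: {
    uploadSingleFile: async (parent, { file }) => {
      const { filename, mimetype, createReadStream } = await file;
      // stream the upload to disk; swap this for your own storeUpload logic
      await new Promise((resolve, reject) =>
        createReadStream()
          .pipe(createWriteStream(`./uploads/${filename}`))
          .on('finish', resolve)
          .on('error', reject)
      );
      return { filename, mimetype };
    },
  },
};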
I am migrating a Laravel app to a Node app using TypeORM. Has anyone been able to implement something similar to Laravel's Polymorphic Relations in TypeORM?
Example schema I am trying to reproduce:
export class Notification {
id: string;
attachable_id: number;
attachable_type: string;
}
I want to be able to have a notification.attachable relation that could be of any type. Then, ideally, I can eager load a user with their last x notifications, with the attachable on each notification.
EDIT:
So I did a refactor/rewrite and put it all in a repo: https://github.com/bashleigh/typeorm-polymorphic
So, I've been thinking of trying to implement something for this for a while. I had 2 days to implement something in a hurry, so I made this crud thing.
import {
FindManyOptions,
DeepPartial,
ObjectID,
FindConditions,
UpdateResult,
Repository,
SaveOptions,
} from 'typeorm';
import { QueryDeepPartialEntity } from 'typeorm/query-builder/QueryPartialEntity';
export interface PolymorphicInterface {
entityId: string;
entityType: string;
}
export type PolyMorphicType<K> = PolymorphicInterface & DeepPartial<K>;
export const POLYMORPHIC_RELATIONSHIP = 'POLYMORPHIC_RELATIONSHIP';
export interface PolymorphicOptions {
type: Function;
parent: Function;
property: string | Symbol;
}
export const PolyMorphic = (type: Function): PropertyDecorator => (
target: Object,
propertyKey: string | Symbol,
): void =>
Reflect.defineMetadata(
`${POLYMORPHIC_RELATIONSHIP}::${propertyKey}`,
{
type,
parent: target.constructor.name,
property: propertyKey,
},
target,
);
export class PolymorphicRepository<T extends DeepPartial<T>> extends Repository<T> {
private getMetadata(): Array<PolymorphicOptions> {
let keys = Reflect.getMetadataKeys((this.metadata.target as Function)['prototype']);
if (!Array.isArray(keys)) {
return [];
}
keys = keys.filter((key: string) => {
const parts = key.split('::');
return parts[0] === POLYMORPHIC_RELATIONSHIP;
});
if (!keys) {
return [];
}
return keys.map(
(key: string): PolymorphicOptions =>
Reflect.getMetadata(key, (this.metadata.target as Function)['prototype']),
);
}
async find(findOptions?: FindConditions<T> | FindManyOptions<T>): Promise<T[]> {
const polymorphicMetadata = this.getMetadata();
if (Object.keys(polymorphicMetadata).length === 0) {
return super.find(findOptions);
}
const entities = await super.find(findOptions);
return this.hydratePolymorphicEntities(entities);
}
public async hydratePolymorphicEntities(entities: Array<T>): Promise<Array<T>> {
const metadata = this.getMetadata();
metadata.forEach(
async (data: PolymorphicOptions): Promise<void> => {
await Promise.all(
entities.map(
async (entity: T): Promise<void> => {
const repository = this.manager.getRepository(data.type);
const property = data.property;
const parent = data.parent;
if (!repository) {
throw new Error(
`Repository not found for type [${
data.type
}] using property [${property}] on parent entity [${parent}]`,
);
}
const morphValues = await repository.find({
where: {
// @ts-ignore
entityId: entity.id, // TODO add type AbstractEntity
entityType: this.metadata.targetName,
},
});
// @ts-ignore
entity[property] = morphValues;
},
),
);
},
);
return entities;
}
public async update(
criteria:
| string
| string[]
| number
| number[]
| Date
| Date[]
| ObjectID
| ObjectID[]
| FindConditions<T>,
partialEntity: QueryDeepPartialEntity<T>,
): Promise<UpdateResult> {
const polymorphicMetadata = this.getMetadata();
if (Object.keys(polymorphicMetadata).length === 0) {
return super.update(criteria, partialEntity);
}
const result = super.update(criteria, partialEntity);
// TODO update morphs
throw new Error("CBA I'm very tired");
return result;
}
public async save<E extends DeepPartial<T>>(
entity: E | Array<E>,
options?: SaveOptions & { reload: false },
): Promise<E & T | Array<E & T>> {
const polymorphicMetadata = this.getMetadata();
if (Object.keys(polymorphicMetadata).length === 0) {
return Array.isArray(entity) ? super.save(entity, options) : super.save(entity);
}
const result = Array.isArray(entity)
? await super.save(entity, options)
: await super.save(entity);
Array.isArray(result)
? await Promise.all(result.map((res: T) => this.saveMorphs(res)))
: await this.saveMorphs(result);
return result;
}
private async saveMorphs(entity: T): Promise<void> {
const metadata = this.getMetadata();
await Promise.all(
metadata.map(
async (data: PolymorphicOptions): Promise<void> => {
const repository: Repository<PolymorphicInterface> = this.manager.getRepository(
data.type,
);
const property = data.property;
const parent = data.parent;
const value: Partial<PolymorphicInterface> | Array<Partial<PolymorphicInterface>> =
// @ts-ignore
entity[property];
if (typeof value === 'undefined' || value === undefined) {
return new Promise(resolve => resolve());
}
if (!repository) {
throw new Error(
`Repository not found for type [${
data.type
}] using property [${property}] on parent entity [${parent}]`,
);
}
let result: Array<any> | any;
if (Array.isArray(value)) {
// @ts-ignore
result = await Promise.all(
value.map(val => {
// @ts-ignore
val.entityId = entity.id;
val.entityType = this.metadata.targetName;
return repository.save(
value instanceof data.type ? value : repository.create(value),
);
}),
);
} else {
// @ts-ignore
value.entityId = entity.id; // TODO resolve AbstractEntity for T
value.entityType = this.metadata.targetName;
result = await repository.save(
value instanceof data.type ? value : repository.create(value),
);
}
// @ts-ignore
entity[property] = result;
},
),
);
}
}
It's pretty rough, but that's what I implemented to tackle this. Essentially, what I've implemented is my own repository with its own methods to handle saving of the entities defined within the metadata.
Then you can use it like so:
@Entity()
export class TestEntity {
  @PolyMorphic(SomeOtherEntity)
  property: SomeOtherEntity[];
}
The typings are really bad, but that's only because I had 1 day to implement this feature and I did it on the plane.
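If it helps, this is roughly how such a repository could be wired up with TypeORM 0.2.x custom repositories (a sketch; TestEntityRepository and the connection handling are assumptions, not part of the original answer):

// Hypothetical wiring sketch, assuming TypeORM 0.2.x custom repositories.
import { EntityRepository, getConnection } from 'typeorm';

@EntityRepository(TestEntity)
export class TestEntityRepository extends PolymorphicRepository<TestEntity> {}

// somewhere after the connection has been created:
async function example() {
  const repository = getConnection().getCustomRepository(TestEntityRepository);
  const entities = await repository.find(); // polymorphic properties get hydrated
  return entities;
}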