In Swagger UI 2.0 the code was:
var basicAuth = new SwaggerClient.PasswordAuthorization("basicAuth", username, password);
window.swaggerUi.api.clientAuthorizations.add("basicAuth", basicAuth);
Can somebody provide the equivalent code for Swagger UI 3.0?
Thanks.
Edit:
I'm trying to do something like this: Adding Basic Authorization for Swagger-UI.
I'm using Swagger on a server protected with Basic auth, so I can't even initialize the library:
const ui = SwaggerUIBundle({
url: "http://petstore.swagger.io/v2/swagger.json",
dom_id: '#swagger-ui',
presets: [
SwaggerUIBundle.presets.apis,
// yay ES6 modules ↘
Array.isArray(SwaggerUIStandalonePreset) ? SwaggerUIStandalonePreset : SwaggerUIStandalonePreset.default
],
plugins: [
SwaggerUIBundle.plugins.DownloadUrl
],
layout: "StandaloneLayout"
})
window.ui = ui
Without basic auth everything works fine. With basic auth enabled: http://prntscr.com/enxee4
In Swagger UI 3.x, fetching specs (.yaml/.json files) protected with Basic auth or API keys is supported in version 3.3.2 and later. In your Swagger UI initialization code, define a requestInterceptor that attaches the auth headers to the spec-fetch request:
<!-- index.html -->
const ui = SwaggerUIBundle({
url: "http://petstore.swagger.io/v2/swagger.json",
requestInterceptor: (req) => {
if (req.loadSpec) {
// Fetch the spec using Basic auth, replace "user" and "password" with yours
req.headers.Authorization = 'Basic ' + btoa('user:password');
// or API key
// req.headers.MyApiKey = 'abcde12345';
// or bearer token
// req.headers.Authorization = 'Bearer abcde12345';
}
return req;
},
...
})
I built an index.html with a simple form where the user fills in their credentials to get a session id, then redirected to swagger.html, for instance, and made some changes there.
Before window.onload:
var orgFetch;
window.setExtraHeader = function(sessionId) {
var system = window.ui.getSystem();
if(!system) return;
if(!orgFetch) {
orgFetch = system.fn.fetch;
}
system.fn.fetch = function(obj) {
if(!obj) return;
if(!obj.headers) {
obj.headers = {};
}
obj.headers['sessionId'] = sessionId;
return orgFetch(obj)
.then(function(fetchRes) {
return fetchRes;
});
}
system.specActions.download();
}
and then:
window.ui = ui;
window.setExtraHeader(localStorage.getItem("sessionId"));
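For reference, a minimal sketch of what that login page's script could look like (the /login endpoint, the sessionId field in its response, and the element ids are all assumptions for this example; swagger.html is the page that runs the code above):
// login page script (sketch) -- posts the form, stores the session id, then opens swagger.html
document.querySelector('#login-form').addEventListener('submit', function (evt) {
    evt.preventDefault();
    var username = document.querySelector('#username').value;
    var password = document.querySelector('#password').value;
    // '/login' and the 'sessionId' response field are placeholders for your own auth endpoint
    fetch('/login', {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ username: username, password: password })
    })
        .then(function (res) { return res.json(); })
        .then(function (data) {
            localStorage.setItem('sessionId', data.sessionId);
            window.location.href = 'swagger.html';
        });
});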
Source: https://github.com/swagger-api/swagger-ui/issues/2793
Hope this helps.
In swagger-ui 3.2.x, to manually set the Authorization header based on the values entered in the Authorize popup for basic authentication, we can use the code below.
const ui = SwaggerUIBundle({
  dom_id: '#swagger-ui',
  requestInterceptor: (req) => {
    if (!req.loadSpec) {
      var authorized = this.ui.authSelectors.authorized();
      // 'Basic_Authentication' is the security scheme key for basic authentication in the OpenAPI file
      var basicAuth = getEntry(authorized, 'Basic_Authentication');
      if (basicAuth) {
        var basicAuthValue = getEntry(basicAuth, 'value');
        if (basicAuthValue) {
          var username = getEntry(basicAuthValue, 'username');
          var password = getEntry(basicAuthValue, 'password');
          if (username && password) {
            req.headers.Authorization = "Basic " + btoa(username + ":" + password);
          }
        }
      }
    }
    return req;
  }
})
//traverse through the object structure of swagger-ui authorized object to get value for an entryName
function getEntry(complexObj, entryName) {
if (complexObj && complexObj._root && complexObj._root.entries) {
var objEntries = complexObj._root.entries;
for (var t = 0; t < objEntries.length; t++) {
var entryArray = objEntries[t];
if (entryArray.length > 1) {
var name = entryArray[0];
if (name === entryName) {
return entryArray[1];
}
}
}
}
return null;
}
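As an aside, the authorized() selector returns an Immutable.js map (that is what the _root.entries structure above belongs to), so if you prefer to rely on Immutable's public API rather than its internals, the same lookup can usually be written with getIn/get. A sketch, assuming the Immutable API is available on that object:
// Sketch: same lookup via Immutable.js accessors, inside the requestInterceptor above
var basicAuthValue = authorized.getIn(['Basic_Authentication', 'value']);
if (basicAuthValue) {
    var username = basicAuthValue.get('username');
    var password = basicAuthValue.get('password');
    if (username && password) {
        req.headers.Authorization = 'Basic ' + btoa(username + ':' + password);
    }
}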
Related
We are seeing an issue where users are unable to access our production and PPE apps via LinkedIn sign-in. The redirect to the specified redirect URL does not happen once the user provides their username and password; the network trace shows the login is successful, but the flow never reaches the redirect URL. This had been working for the last 4 years or so and suddenly started failing in both environments yesterday.
Bummer. Something went wrong
We verified the network trace and raised a support case with LinkedIn, including a recording. Finally we were redirected to raise the issue here.
I had the same issue and found that it was caused by using JSON.stringify to "overload" the state parameter with other parameters. In my case, I was adding the extra parameters in the following way:
providerCfg.auth_params.state = JSON.stringify({
state: providerCfg.auth_params.state,
redirectPageUrl,
redirectParams,
userTypeBit,
isLogin
})
const authUrl = new URL(providerCfg.auth_url)
Object.entries(providerCfg.auth_params).forEach(([key, val]) => {
authUrl.searchParams.append(key, encodeURIComponent(val))
})
return buildURL(providerCfg.auth_url, providerCfg.auth_params)
When I removed the call to JSON.stringify and just passed in a plain state parameter, the OAuth flow worked correctly. Obviously, the other parameters I was passing in were important, so I created my own functions to serialize and deserialize the values. The code below works well for anything other than deeply nested objects. You will need to update metaDataCfg based on your own requirements.
const META_STRING_DELIMITER = '|'
const serializeBasicObject = (targetObj) => {
if (!targetObj) {
return ''
}
return Object.entries(targetObj).reduce((objString, [key, val]) => {
const param = `${key}=${val || ''}`
if (!objString.length) {
return param
}
return `${objString}${META_STRING_DELIMITER}${param}`
}, '')
}
const deserializeBasicObject = (targetStr) => {
if (!targetStr) {
return ''
}
const keyValPairs = targetStr.split(META_STRING_DELIMITER)
return keyValPairs.reduce((targetObj, keyValPair) => {
const splitIdx = keyValPair.indexOf('=')
const key = keyValPair.slice(0, splitIdx)
targetObj[key] = keyValPair.slice(splitIdx + 1, keyValPair.length)
return targetObj
}, {})
}
const metaDataCfg = {
state: {},
redirectPageUrl: {},
redirectParams: {
serialize: serializeBasicObject,
deserialize: deserializeBasicObject
},
userTypeBit: { deserialize: Number },
isLogin: { deserialize: dataUtil.getBoolean }
}
const getMetaString = (metaData) => {
return Object.entries(metaDataCfg).reduce((metaString, [metaDataKey, cfg]) => {
const val = (cfg.serialize) ? cfg.serialize(metaData[metaDataKey]) : metaData[metaDataKey]
const param = `${metaDataKey}=${dataUtil.isNil(val) ? '' : val}`
if (!metaString.length) {
return param
}
return `${metaString}${META_STRING_DELIMITER}${param}`
}, '')
}
export const getDataFromMetaString = (metaString) => {
const params = metaString.split(META_STRING_DELIMITER)
const data = params.reduce((metaData, param) => {
const splitIdx = param.indexOf('=')
const key = param.slice(0, splitIdx)
let val = param.slice(splitIdx + 1, param.length)
if (dataUtil.isNil(val) || !val.length) {
return metaData
}
const deserializer = metaDataCfg[key].deserialize
if (deserializer && val) {
val = deserializer(val)
}
metaData[key] = val
return metaData
}, {})
return data
}
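For reference, a round trip with the helpers above behaves roughly like this (all values are made up; note that a nested object such as redirectParams shares the | delimiter with the top-level string, so it is safest kept to a single key=value pair):
// Made-up values, purely for illustration
const metaString = getMetaString({
    state: 'abc123',
    redirectPageUrl: '/dashboard',
    redirectParams: { tab: 'jobs' }, // single key=value pair, see the note above
    userTypeBit: 4,
    isLogin: true
})
// metaString === 'state=abc123|redirectPageUrl=/dashboard|redirectParams=tab=jobs|userTypeBit=4|isLogin=true'
const restored = getDataFromMetaString(metaString)
// restored.redirectParams => { tab: 'jobs' }, restored.userTypeBit => 4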
Every time I make a call to acquireToken, it keeps launching the AAD login window and prompting me for a username/password, even though I've already authenticated successfully and consumed an access token to make API calls.
Here is my code.
Step 1: Call the loadData function from the controller.
loadData = (): Rx.IPromise<Array<UserResult>> => {
var url = this.xxxApiUrl;
return Http.get<Array<UserResult>>(this._$http, url);
};
Step 2: The get helper that acquires a token and makes the HTTP call.
export function get<TResult>(http: ng.IHttpService, url: string,
ignoreLoadingBar: boolean = false, retryCount = 0): Rx.IPromise<TResult> {
var req: any = { headers: {} }; // initialize headers so Authorization can be set below
if (ignoreLoadingBar) {
req.ignoreLoadingBar = ignoreLoadingBar;
}
let resObservable = Rx.Observable.create(subscriber => {
acquireToken(url, (message, token) => {
req.headers.Authorization = `Bearer ${token}`;
http.get(url, req)
.then(res => {
subscriber.onNext(res.data);
subscriber.onCompleted();
}, (err) => { alert(JSON.stringify(err)); });
});
});
return resObservable.toPromise();
}
function acquireToken(apiUrl: string, callback) {
let innerCallback = (res) => callback('', res.accessToken);
let xConfig= JSON.parse(<any>sessionStorage.getItem('xConfig'));
window.AuthenticationContext =
    new window.Microsoft.ADAL.AuthenticationContext(xConfig.common.azure.authorityTenant);
window.AuthenticationContext.tokenCache.readItems().then(items => {
if (items.length > 0) {
let authority = items[0].authority;
window.AuthenticationContext =
    new window.Microsoft.ADAL.AuthenticationContext(authority);
}
let resourceUri = getResourceUri(xConfig, apiUrl);
window.AuthenticationContext.acquireTokenSilentAsync(resourceUri,
xConfig.common.azure.clientId, xConfig.common.azure.redirectUri)
.then(innerCallback, (err) => {
window.AuthenticationContext.acquireTokenAsync(resourceUri,
xConfig.common.azure.clientId, xConfig.common.azure.redirectUri)
.then(innerCallback);
});
});
}
Looking at your code, it looks like you are calling acquireTokenSilentAsync with the common endpoint, which is not supported. Please make sure to use your tenant ID or name (like tenant.onmicrosoft.com) instead of common when using acquireTokenSilentAsync.
For more information about the common endpoint, please see here.
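To illustrate the suggested change (this mirrors the calls from the question; the tenant name below is a placeholder you would replace with your own, and xConfig/resourceUri/innerCallback are the same variables the question already defines):
// Sketch: use a tenant-specific authority for the silent call instead of the common endpoint
var authority = 'https://login.microsoftonline.com/yourtenant.onmicrosoft.com'; // placeholder tenant
window.AuthenticationContext = new window.Microsoft.ADAL.AuthenticationContext(authority);
window.AuthenticationContext.acquireTokenSilentAsync(resourceUri,
        xConfig.common.azure.clientId, xConfig.common.azure.redirectUri)
    .then(innerCallback, (err) => {
        // Fall back to the interactive prompt only when the silent request genuinely fails
        window.AuthenticationContext.acquireTokenAsync(resourceUri,
                xConfig.common.azure.clientId, xConfig.common.azure.redirectUri)
            .then(innerCallback);
    });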
I'm developing an app in NativeScript for the first time and running into an issue where AJAX calls work on Android but not iOS. I have a login.js file which requires a user view model (user-view-model.js); when I test the code on Android it takes me to the "home" page, but on iOS it hits the catch function.
login.js:
var dialogsModule = require("ui/dialogs");
var UserViewModel = require("../../shared/view-models/user-view-model");
var applicationSettings = require("application-settings");
var user = new UserViewModel({
email: "aaa#aaa.com",
password: "aaa"
});
var frameModule = require("ui/frame");
var page;
exports.loaded = function(args) {
page = args.object;
page.bindingContext = user;
};
exports.login = function () {
user.login().catch(function(error) {
dialogsModule.alert({
message: "Unfortunately we could not find your account.",
okButtonText: "OK"
});
return Promise.reject();
}).then(function(response) {
console.dir(response)
console.log("past response")
applicationSettings.setString("user_id", response.user_id);
applicationSettings.setString("first_name", response.first_name);
applicationSettings.setString("last_name", response.last_name);
applicationSettings.setString("user_type", response.user_type);
var topmost = frameModule.topmost();
topmost.navigate("views/home/home");
});
};
user-view-model.js:
var config = require("../../shared/config");
var fetchModule = require("fetch");
var observableModule = require("data/observable");
var http = require("http");
function User(info) {
info = info || {};
var viewModel = new observableModule.fromObject({
email: info.email || "",
password: info.password || ""
});
viewModel.login = function() {
let loginEmail = JSON.stringify(this.get("email")).replace(/['"]+/g, '');
let loginPassword = JSON.stringify(this.get("password")).replace(/['"]+/g, '');
console.log(loginEmail, loginPassword);
let loginUrl = config.serverPHPServiceUrl + "Login.php?user_id=" + loginEmail + "&password=" + loginPassword;
console.log(loginUrl);
// I tried this way first and wasn't able to login on iOS, which made me try the second method below.
// return fetchModule.fetch(loginUrl, {
// method: "POST",
// headers: {
// "Content-Type": "application/json"
// }
// }).then(handleErrors).then(function(response) {
// return response.json();
// }).then(function(data) {
// console.dir(data);
// console.log(data["results"][0]["user_id"])
// return data["results"][0];
// });
// This method works on Android but not iOS.
return http.getJSON(loginUrl).then(function(response) {
console.dir(response);
return response.results[0];
})
};
return viewModel;
};
function handleErrors(response) {
console.log("in errors")
if (!response.ok) {
console.log(JSON.stringify(response));
throw Error(response.statusText);
}
return response;
}
module.exports = User;
Is there anything fundamentally wrong with my code, or do asynchronous calls work differently on iOS vs. Android in NativeScript? I did the Grocery tutorial and didn't run into this issue, so I didn't think that was the case. Does it matter that the backend is using PHP?
I fixed my issue: I started a new project with Angular 2 and ran into the same error, but this time it gave me the message "Error: The resource could not be loaded because the App Transport Security policy requires the use of a secure connection." I solved it by switching my URL call to "https", but this post has another solution.
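For completeness, a sketch of the change (the file and property name come from the question's code; the host is a placeholder):
// shared/config.js (sketch)
module.exports = {
    // Was "http://..." -- plain HTTP is blocked by iOS App Transport Security by default
    serverPHPServiceUrl: "https://example.com/services/"
};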
I want to monitor my Dataflow jobs from an application. The application I'm developing is a Node.js application, and ideally a package like @google-cloud/bigquery would exist for Dataflow as well. I'm fully aware that I might not be able to start a job if it is not a template job, but there should be an easy way to list jobs or get job information.
Update:
I found this spec, https://dataflow.googleapis.com/$discovery/rest?version=v1b3, but I don't understand what the location field is for in the list operation. The spec was linked from this page: https://cloud.google.com/dataflow/docs/reference/rest/
I did find the solution myself. There is a repo that basically has all the APIs for gcloud out there: https://github.com/google/google-api-nodejs-client
After I found that, I could easily do what I wanted:
'use strict';
var google = require('googleapis');
var dataflow = google.dataflow('v1b3');
google.auth.getApplicationDefault(function (err, authClient, projectId) {
if (err) {
throw err;
}
// The createScopedRequired method returns true when running on GAE or a local developer
// machine. In that case, the desired scopes must be passed in manually. When the code is
// running in GCE or a Managed VM, the scopes are pulled from the GCE metadata server.
// See https://cloud.google.com/compute/docs/authentication for more information.
if (authClient.createScopedRequired && authClient.createScopedRequired()) {
// Scopes can be specified either as an array or as a single, space-delimited string.
authClient = authClient.createScoped([
'https://www.googleapis.com/auth/compute'
]);
}
    // (Left over from the googleapis sample: the Compute client below isn't needed for the
    // Dataflow call, but creating it is harmless.)
    var compute = google.compute({
        version: 'v1',
        auth: authClient
    });
var result = dataflow.projects.jobs.list({
'projectId': projectId,
'auth': authClient
}, function (err, result) {
console.log(err, result);
});
});
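Regarding the location field from the question's update: for Dataflow it names the regional endpoint a job runs in (for example us-central1), and the list operation accepts it as a filter. Under the same setup as above, the call would look roughly like this (the region is a placeholder):
// Sketch: list jobs for a specific regional endpoint
dataflow.projects.jobs.list({
    'projectId': projectId,
    'location': 'us-central1', // regional endpoint the jobs run in
    'auth': authClient
}, function (err, result) {
    console.log(err, result);
});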
For posterity... there is a way to do this without a client library, but it requires generating a JWT from service account credentials and exchanging the JWT for an access token to execute a Dataflow template. This example uses the Cloud_Bigtable_to_GCS_Avro template:
import axios from "axios";
import jwt from "jsonwebtoken";
import mem from "mem";
const loadCredentials = mem(function() {
// This is a string containing service account credentials
const serviceAccountJson = process.env.GOOGLE_APPLICATION_CREDENTIALS;
if (!serviceAccountJson) {
throw new Error("Missing GCP Credentials");
}
const credentials = JSON.parse(serviceAccountJson.replace(/\n/g, "\\n").replace(/\r/g, "\\r").replace(/\t/g, "\\t"));
return {
projectId: credentials.project_id,
privateKeyId: credentials.private_key_id,
privateKey: credentials.private_key,
clientEmail: credentials.client_email,
};
});
interface ProjectCredentials {
projectId: string;
privateKeyId: string;
privateKey: string;
clientEmail: string;
}
function generateJWT(params: ProjectCredentials) {
const scope = "https://www.googleapis.com/auth/cloud-platform";
const authUrl = "https://www.googleapis.com/oauth2/v4/token";
const issued = new Date().getTime() / 1000;
const expires = issued + 60;
const payload = {
iss: params.clientEmail,
sub: params.clientEmail,
aud: authUrl,
iat: issued,
exp: expires,
scope: scope,
};
const options = {
keyid: params.privateKeyId,
algorithm: "RS256",
};
return jwt.sign(payload, params.privateKey, options);
}
async function getAccessToken(credentials: ProjectCredentials): Promise<string> {
const jwt = generateJWT(credentials);
const authUrl = "https://www.googleapis.com/oauth2/v4/token";
const params = {
grant_type: "urn:ietf:params:oauth:grant-type:jwt-bearer",
assertion: jwt,
};
try {
const response = await axios.post(authUrl, params);
return response.data.access_token;
} catch (error) {
console.error("Failed to get access token", error);
throw error;
}
}
function buildTemplateParams(projectId: string, table: string) {
return {
jobName: `[job-name]`,
parameters: {
bigtableProjectId: projectId,
bigtableInstanceId: "[table-instance]",
bigtableTableId: table,
outputDirectory: `[gs://your-instance]`,
filenamePrefix: `${table}-`,
},
environment: {
zone: "us-west1-a" // omit or define your own,
tempLocation: `[gs://your-instance/temp]`,
},
};
}
async function backupTable(table: string) {
console.info(`Executing backup template for table=${table}`);
const credentials = loadCredentials();
const { projectId } = credentials;
const accessToken = await getAccessToken(credentials);
const baseUrl = "https://dataflow.googleapis.com/v1b3/projects";
const templatePath = "gs://dataflow-templates/latest/Cloud_Bigtable_to_GCS_Avro";
const url = `${baseUrl}/${projectId}/templates:launch?gcsPath=${templatePath}`;
const template = buildTemplateParams(projectId, table);
try {
const response = await axios.post(url, template, {
headers: { Authorization: `Bearer ${accessToken}` },
});
console.log("GCP Response", response.data);
} catch (error) {
console.error(`Failed to execute template for ${table}`, error.message);
}
}
async function run() {
await backupTable("my-table");
}
try {
run();
} catch (err) {
process.exit(1);
}
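If you also want to monitor the launched job (the original goal of the question), the templates:launch response above contains a job object whose id can be passed to projects.jobs.get with the same access token. A sketch under the same assumptions and imports as the snippet above:
// Sketch: poll the state of a launched job (reuses axios and the access token from above)
async function getJobState(projectId: string, jobId: string, accessToken: string): Promise<string> {
  const url = `https://dataflow.googleapis.com/v1b3/projects/${projectId}/jobs/${jobId}`;
  const response = await axios.get(url, {
    headers: { Authorization: `Bearer ${accessToken}` },
  });
  // currentState is one of the JOB_STATE_* values, e.g. JOB_STATE_RUNNING or JOB_STATE_DONE
  // (for jobs outside the default region, a location query parameter may also be required)
  return response.data.currentState;
}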
I found two different behaviors for Titanium HTTPClient in SDK 1.7.5 and 1.8.0.1.
Here's my complete code:
function Synchronizer () {
//server calls
var callStack = [{
url: 'http://url1',
name: 'foo1'
},
{
url: 'http://url2',
name: 'foo2'
},
{
url: 'http://url3',
name: 'foo3'
},
{
url: 'http://url4',
name: 'foo4'
}
];
//processing data
var dataStack = new Array();
//call stack pointer
var stackPointer = 0;
//HTTPClient
var httpClient = Titanium.Network.createHTTPClient({
onerror:function(e){
UIManager.SynchronizeUI.onError();
},
ondatastream:function(e) {
UIManager.SynchronizeUI.setProgressBarValue(stackPointer);
}
});
//....
//DataConsumer
var dataConsumer = new DataConsumer();
//Method for synchronization
this.synchronizeAll = function (usr, pwd) {
if(!Ti.Network.online){
Ti.API.info('Not connected to the internet');
return;
}
Ti.API.info('Avvia syncro');
stackPointer = 0;
//Autentication call
httpClient.open('POST', 'http://url/login');
httpClient.onload = function() {
// If authenticated
if(this.responseText.trim() == ('OK,' + usr)) {
Ti.API.info('Login successful');
consumeCallStack();
}
//else
else {
//...
}
}
httpClient.send({username: usr, password: pwd, ajaxlogin: 'true'});
Ti.API.info('Calling login..');
}
/*
* stack consumer
*/
function consumeCallStack() {
Ti.API.info('Execute call ' + stackPointer);
httpClient.open('GET', callStack[stackPointer].url);
httpClient.onload = function(){
alert(httpClient.responseText);
//data
var data = JSON.parse(this.responseText);
Ti.API.info('Retrieved data for ' + callStack[stackPointer].name);
//..
}
httpClient.send();
}
} // end of Synchronizer
HTTPClient retrieves the data and the onerror function isn't invoked.
The problem is that, after authenticating, the second HTTPClient call gets the JSON I want when running with 1.7.5, but with 1.8.0.1 it gets the same successful login page as the first call. Could it be a problem with cookies not being enabled? Thank you very much!
Use the release notes to see which bug fixes relate to your issue: Titanium 1.8.0.1 Release Notes. In particular, look at the items "httpclient sync mode" and "iOS: Http request sent twice on iOS 5".
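Independently of the release-note items above, one thing worth trying (an assumption on my part, not something the release notes confirm) is to stop reusing a single HTTPClient instance for both the login call and the stack calls; creating a fresh client per request avoids handlers and state being carried over between calls, which matches the "second call returns the first call's page" symptom. A sketch reusing the names from the question:
function consumeCallStack() {
    Ti.API.info('Execute call ' + stackPointer);
    // Fresh client per request instead of the shared httpClient instance
    var client = Titanium.Network.createHTTPClient({
        onerror: function (e) {
            UIManager.SynchronizeUI.onError();
        },
        ondatastream: function (e) {
            UIManager.SynchronizeUI.setProgressBarValue(stackPointer);
        }
    });
    client.onload = function () {
        var data = JSON.parse(this.responseText);
        Ti.API.info('Retrieved data for ' + callStack[stackPointer].name);
        //..
    };
    client.open('GET', callStack[stackPointer].url);
    client.send();
}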