nicUpload says "Invalid Upload ID", can't make it work

I'm trying to implement nicEdit with the nicUpload plugin, but when I select a file to upload it says "Failed to upload image", and the server response says "Invalid Upload ID".
This is the code that calls the script and initializes:
<script src="http://js.nicedit.com/nicEdit-latest.js" type="text/javascript"></script>
<script type="text/javascript">//<![CDATA[
bkLib.onDomLoaded(function() {
new nicEditor({uploadURI : '../../nicedit/nicUpload.php'}).panelInstance('area1');
});
//]]>
</script>
The path to nicUpload.php is correct, and the code is the one that can be found in the documentation: http://nicedit.com/src/nicUpload/nicUpload.js
I made the upload folder changes and set write permissions. According to the documentation (http://wiki.nicedit.com/w/page/515/Configuration%20Options), that's all, but I keep getting errors. Any ideas?

After looking for a solution for a long time (lots of posts without a real solution), I have now fixed the code myself and am able to upload an image to my own server. Thanks to Firebug and Eclipse ;-)
The main problem is that nicUpload.php is old and does not work with the current nicEdit upload function.
Error handling is missing; feel free to add it...
Add the nicEditor to your PHP file and configure it to use nicUpload.php:
new nicEditor({iconsPath : 'pics/nicEditorIcons.gif', uploadURI : 'script/nicUpload.php'}).panelInstance('area1');
Download the uncompressed nicEdit.js and change the following lines:
uploadFile : function() {
var file = this.fileInput.files[0];
if (!file || !file.type.match(/image.*/)) {
this.onError("Only image files can be uploaded");
return;
}
this.fileInput.setStyle({ display: 'none' });
this.setProgress(0);
var fd = new FormData();
fd.append("image", file);
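// note: this "key" is carried over from the original nicEdit.js (presumably for its default hosted uploader); the nicUpload.php below ignores it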
fd.append("key", "b7ea18a4ecbda8e92203fa4968d10660");
var xhr = new XMLHttpRequest();
xhr.open("POST", this.ne.options.uploadURI || this.nicURI);
xhr.onload = function() {
try {
var res = JSON.parse(xhr.responseText);
} catch(e) {
return this.onError();
}
//this.onUploaded(res.upload); // CHANGE HERE
this.onUploaded(res);
}.closure(this);
xhr.onerror = this.onError.closure(this);
xhr.upload.onprogress = function(e) {
this.setProgress(e.loaded / e.total);
}.closure(this);
xhr.send(fd);
},
onUploaded : function(options) {
this.removePane();
//var src = options.links.original; // CHANGE HERE
var src = options['url'];
if(!this.im) {
this.ne.selectedInstance.restoreRng();
//var tmp = 'javascript:nicImTemp();';
this.ne.nicCommand("insertImage", src);
this.im = this.findElm('IMG','src', src);
}
var w = parseInt(this.ne.selectedInstance.elm.getStyle('width'));
if(this.im) {
this.im.setAttributes({
src : src,
width : (w && options.width) ? Math.min(w, options.width) : '' // CHANGE HERE: the modified nicUpload.php returns width at the top level
});
}
}
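For reference, with these changes onUploaded expects the server to reply with a flat JSON object along these lines (the values here are only illustrative):
{ "done": 1, "width": 800, "url": "../images/uploadedImages/photo.jpg" }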
Change nicUpload.php like this:
<?php
/* NicEdit - Micro Inline WYSIWYG
* Copyright 2007-2009 Brian Kirchoff
*
* NicEdit is distributed under the terms of the MIT license
* For more information visit http://nicedit.com/
* Do not remove this copyright message
*
* nicUpload Receiver Script PHP Edition
* @description: Save images uploaded from a user's computer to a directory, and
* return the URL of the image to the client for use in nicEdit
* @author: Brian Kirchoff <briankircho@gmail.com>
* @sponsored by: DotConcepts (http://www.dotconcepts.net)
* @version: 0.9.0
*/
/*
* @author: Christoph Pahre
* @version: 0.1
* @description: different modifications so that this PHP file works with the newest nicEdit.js (which also needs modification - @see)
* @see http://stackoverflow.com/questions/11677128/nicupload-says-invalid-upload-id-cant-make-it-works
*/
define('NICUPLOAD_PATH', '../images/uploadedImages'); // Set the path (relative or absolute) to
// the directory to save image files
define('NICUPLOAD_URI', '../images/uploadedImages'); // Set the URL (relative or absolute) to
// the directory defined above
$nicupload_allowed_extensions = array('jpg','jpeg','png','gif','bmp');
if(!function_exists('json_encode')) {
die('{"error" : "Image upload host does not have the required dependicies (json_encode/decode)"}');
}
if($_SERVER['REQUEST_METHOD']=='POST') { // Upload is complete
$file = $_FILES['image'];
$image = $file['tmp_name'];
$id = $file['name'];
$max_upload_size = ini_max_upload_size();
if(!$file) {
nicupload_error('Must be less than '.bytes_to_readable($max_upload_size));
}
$ext = strtolower(substr(strrchr($file['name'], '.'), 1));
$size = getimagesize($image); // must not be commented out; $size is used below
if(!$size || !in_array($ext, $nicupload_allowed_extensions)) {
nicupload_error('Invalid image file, must be a valid image less than '.bytes_to_readable($max_upload_size));
}
$filename = $id;
$path = NICUPLOAD_PATH.'/'.$filename;
if(!move_uploaded_file($image, $path)) {
nicupload_error('Server error, failed to move file');
}
$status = array();
$status['done'] = 1;
$status['width'] = $size[0];
$rp = realpath($path);
$status['url'] = NICUPLOAD_URI ."/".$id;
nicupload_output($status, false);
exit;
}
// UTILITY FUNCTIONS
function nicupload_error($msg) {
echo nicupload_output(array('error' => $msg));
}
function nicupload_output($status, $showLoadingMsg = false) {
$script = json_encode($status);
$script = str_replace("\\/", '/', $script);
echo $script;
exit;
}
function ini_max_upload_size() {
$post_size = ini_get('post_max_size');
$upload_size = ini_get('upload_max_filesize');
if(!$post_size) $post_size = '8M';
if(!$upload_size) $upload_size = '2M';
return min( ini_bytes_from_string($post_size), ini_bytes_from_string($upload_size) );
}
function ini_bytes_from_string($val) {
$val = trim($val);
$last = strtolower($val[strlen($val)-1]);
switch($last) {
// The 'G' modifier is available since PHP 5.1.0
case 'g':
$val *= 1024;
case 'm':
$val *= 1024;
case 'k':
$val *= 1024;
}
return $val;
}
function bytes_to_readable( $bytes ) {
if ($bytes<=0)
return '0 Byte';
$convention=1000; //[1000->10^x|1024->2^x]
$s=array('B', 'kB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB');
$e=floor(log($bytes,$convention));
return round($bytes/pow($convention,$e),2).' '.$s[$e];
}
?>

You can manually pass an id to your script, e.g. nicUpload.php?id=introPicHeader, and it will become introPicHeader.jpg (or the appropriate extension) in the images folder you defined.
However, I have noticed that this script is broken and cannot access the configuration option uploadURI if it is specified directly in nicEdit.js inside the nicEditorAdvancedButton.extend({. This causes access to a relatively pathed "Unknown" resource, causing an error.
The documentation implies otherwise, and the fact that the nicURI was specified here for imgur.com (maybe as a default) gave me the impression I could also add an uploadURI reference to the nicUpload.php script in a single place rather than on every editor instantiation.
Update
This works if you pass it during instantiation, which I guess does allow for easy dynamic id population.
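A minimal sketch of what that could look like, assuming the server-side script reads the id from the query string (pageId here is a hypothetical per-page value):
bkLib.onDomLoaded(function() {
    var pageId = 'introPicHeader'; // hypothetical, e.g. injected server-side per page
    new nicEditor({
        iconsPath : 'pics/nicEditorIcons.gif',
        uploadURI : 'script/nicUpload.php?id=' + pageId
    }).panelInstance('area1');
});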
Unfortunately, nicUpload.php is riddled with errors and its output is not JSON. The editor expects to parse JSON, finds a script tag instead, and errors with unexpected token "<".
There is a raft of other errors, which I will attempt to identify:
In nicEdit.js
A.append("image") should in fact be A.append("nicImage")
this.onUploaded(D.upload) should become this.onUploaded(D)
this.onUploaded(D) should be moved to within the try block after var D=JSON.parse(C.responseText) to fix variable scope issues
B.image.width needs to become B.width
In nicUpload.php
JSON output is not formed correctly; comment out the HTML output and output just json_encode($status).
JSON output needs to return a key/value pair named links rather than url, although renaming var D=B.links to var D=B.url in nicEdit.js would also suffice as a fix.
Both the PHP and the JavaScript code leave a lot to be desired; I get errors regularly and have been fixing them myself.

Related

Saxon CS: transform.doTransform cannot find out file from first transformation on windows machine but can on mac

I am creating an Azure Functions application to validate XML files using a zip folder of Schematron files.
I have run into a compatibility issue with how the URIs for the files are created between Mac and Windows.
The files are downloaded from a zip on Azure Blob Storage and then extracted to the function's local storage.
When a colleague runs the transform method of the Saxon CS API on a Windows machine, the method is able to run the first transformation and produce the stage1.out file; however, on the second transformation the transform method throws an exception stating that it cannot find the file, even though it is present in the temp directory.
On Mac the URI is /var/folders/6_/3x594vpn6z1fjclc0vx4v89m0000gn/T and on Windows it is trying to find it at file:///C:/Users/44741/AppData/Local/Temp/, but the library is unable to find the file on the Windows machine even if it is moved out of temp storage.
Unable to retrieve URI file:///C:/Users/44741/Desktop/files/stage1.out
The file is present at this location, but for some reason the library cannot pick it up on the Windows machine, while it works fine on my Mac. I am using Path.Combine to build the URI.
Has anyone else run into this issue before?
The code being used for the transformations is below.
{
try
{
var transform = new Transform();
transform.doTransform(GetTransformArguments(arguments[Constants.InStage1File],
arguments[Constants.SourceDir] + "/" + schematronFile, arguments[Constants.Stage1Out]));
transform.doTransform(GetTransformArguments(arguments[Constants.InStage2File], arguments[Constants.Stage1Out],
arguments[Constants.Stage2Out]));
transform.doTransform(GetFinalTransformArguments(arguments[Constants.InStage3File], arguments[Constants.Stage2Out],
arguments[Constants.Stage3Out]));
Log.Information("Stage 3 out file written to : " + arguments[Constants.Stage3Out]);
return true;
}
catch (FileNotFoundException ex)
{
Log.Warning("Cannot find files" + ex);
return false;
}
}
private static string[] GetTransformArguments(string xslFile, string inputFile, string outputFile)
{
return new[]
{
"-xsl:" + xslFile,
"-s:" + inputFile,
"-o:" + outputFile
};
}
private static string[] GetFinalTransformArguments(string xslFile, string inputFile, string outputFile)
{
return new[]
{
"-xsl:" + xslFile,
"-s:" + inputFile,
"-o:" + outputFile,
"allow-foreign=true",
"generate-fired-rule=true"
};
}```
So, assuming the intermediary results are not needed as files and you just want the result (I assume that is the Schematron schema compiled to XSLT), you could try to run XSLT 3.0 using the API of SaxonCS (using Saxon.Api), compiling and chaining your three stylesheets with e.g.
using Saxon.Api;
string isoSchematronDir = @"C:\SomePath\SomeDir\iso-schematron-xslt2";
string[] isoSchematronXslts = { "iso_dsdl_include.xsl", "iso_abstract_expand.xsl", "iso_svrl_for_xslt2.xsl" };
Processor processor = new(true);
var xsltCompiler = processor.NewXsltCompiler();
var baseUri = new Uri(Path.Combine(isoSchematronDir, isoSchematronXslts[2]));
xsltCompiler.BaseUri = baseUri;
var isoSchematronStages = isoSchematronXslts.Select(xslt => xsltCompiler.Compile(new Uri(baseUri, xslt)).Load30()).ToList();
isoSchematronStages[2].SetStylesheetParameters(new Dictionary<QName, XdmValue>() { { new QName("allow-foreign"), new XdmAtomicValue(true) } });
using (var schematronIs = File.OpenRead("price.sch"))
{
using (var compiledOs = File.OpenWrite("price.sch.xsl"))
{
isoSchematronStages[0].ApplyTemplates(
schematronIs,
isoSchematronStages[1].AsDocumentDestination(
isoSchematronStages[2].AsDocumentDestination(processor.NewSerializer(compiledOs))
)
);
}
}
If you only need the compiled Schematron to apply it further to validate an XML instance document against that Schematron you could even store the Schematron as an XdmDestination whose XdmNode you feed to XsltCompiler e.g.
using Saxon.Api;
string isoSchematronDir = @"C:\SomePath\SomeDir\iso-schematron-xslt2";
string[] isoSchematronXslts = { "iso_dsdl_include.xsl", "iso_abstract_expand.xsl", "iso_svrl_for_xslt2.xsl" };
Processor processor = new(true);
var xsltCompiler = processor.NewXsltCompiler();
var baseUri = new Uri(Path.Combine(isoSchematronDir, isoSchematronXslts[2]));
xsltCompiler.BaseUri = baseUri;
var isoSchematronStages = isoSchematronXslts.Select(xslt => xsltCompiler.Compile(new Uri(baseUri, xslt)).Load30()).ToList();
isoSchematronStages[2].SetStylesheetParameters(new Dictionary<QName, XdmValue>() { { new QName("allow-foreign"), new XdmAtomicValue(true) } });
var compiledSchematronXslt = new XdmDestination();
using (var schematronIs = File.OpenRead("price.sch"))
{
isoSchematronStages[0].ApplyTemplates(
schematronIs,
isoSchematronStages[1].AsDocumentDestination(
isoSchematronStages[2].AsDocumentDestination(compiledSchematronXslt)
)
);
}
var schematronValidator = xsltCompiler.Compile(compiledSchematronXslt.XdmNode).Load30();
using (var sampleIs = File.OpenRead("books.xml"))
{
schematronValidator.ApplyTemplates(sampleIs, processor.NewSerializer(Console.Out));
}
The last example writes the XSLT/Schematron validation SVRL output to the console but could of course also write it to a file.

Safari audio HTML5 issue

I'm working on an iOS/macOS issue.
I'm trying to play a simple audio file through an HTML5 audio element, and everything is working as expected except on iOS/macOS.
My app is built on Symfony, using Nginx and custom PHP controllers to serve the audio resource. The audio element's source is a link to a route that handles the streaming.
My logic is the following:
Get the file from the URL ID
Read the request headers to find the expected range and length
Set the appropriate headers to force 206 Partial Content behaviour
Write the expected file chunk and return a 206 response.
For example, Range: bytes=100- against a 1000-byte file gives offset 100, length 900, and Content-Range: bytes 100-999/1000.
Here is the code for this:
public function getAudio(Request $request, $audioId)
{
/*
* Getting the file
*/
$em = $this->getDoctrine()->getManager();
$audio = $em->getRepository("CoreBundle:Audio")->findOneById($audioId);
$media = $audio->getFile();
$path = $this->container->get('sonata.media.twig.extension')->path($media, 'reference');
$file = $this->get('kernel')->getRootDir() . "/../web$path";
/*
* Setting length and offset for serving the right chunk
*/
$offset = 0;
$requestedRange = $request->headers->get("Range");
$size = $media->getSize();
$length = $size;
if($requestedRange){
preg_match('/bytes=(\d+)-(\d+)?/', $requestedRange, $matches);
$offset = intval($matches[1]);
if(count($matches) > 2){
$length = intval($matches[2]) - $offset + 1;
}
else{
$length = ($size) - $offset;
}
}
if(file_exists($file) && $length) {
$file = fopen($file, 'r');
fseek($file, $offset);
$data = fread($file, $length);
fclose($file);
if($requestedRange){
header('Content-Range: bytes ' . $offset . '-' . ($offset + $length-1) . '/' . $size);
}
else{
header('Content-Range: bytes 0-'.($size-1).'/'.$size);
}
/*
* Forcing some headers in the response to fit request attempts
*/
header("Pragma: public");
header("Expires: 0");
header('Content-type: audio/mpeg');
header('Content-Disposition: inline');
header('Accept-Ranges: bytes');
header('Content-Length: '.$length);
header('X-Pad: avoid browser bug');
header('Cache-Control: no-cache');
print($data);
return new Response("",206);
}
else{
return new Response("",200);
}
}
Again, everything is working fine on Firefox and Chrome; the file is streamed and played perfectly.
But when it comes to iOS and macOS, the file just doesn't play. A peek at the network console shows two subsequent requests, with the last one in error (see the links below).
However, no error is logged in PHP and I'm now completely stuck.
first-request
second-request
The logic was fine but the management of the response wasn't.
After reading the Symfony documentation about Response, I changed every header() call to set the header on the Response object instead, e.g.:
$response->headers->set('Content-Type', 'audio/mpeg');
And every print($data) to:
$response = new Response($data);
Returning the complete Response object did the job.

List all indexeddb for one host in firefox addon

I figured that if the devtools can list all created IndexedDB databases, then there should be an API to retrieve them...?
Does anyone know how I can get a list of names with the help of the Firefox SDK?
I dug into the code and looked at the source. Unfortunately there wasn't any convenient API that would pull out all the databases for one host.
The way they did it was to look around in the user's profile folder for .sqlite files and run a SQL query against each one (multiple times, in case there is an ongoing transaction) asking for the database name.
It came down to this piece of code:
// stripped down version of: https://dxr.mozilla.org/mozilla-central/source/devtools/server/actors/storage.js
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
"use strict";
const {async} = require("resource://gre/modules/devtools/async-utils");
const { setTimeout } = require("sdk/timers");
const promise = require("sdk/core/promise");
// A RegExp for characters that cannot appear in a file/directory name. This is
// used to sanitize the host name for indexed db to lookup whether the file is
// present in <profileDir>/storage/default/ location
const illegalFileNameCharacters = [
"[",
// Control characters \001 to \036
"\\x00-\\x24",
// Special characters
"/:*?\\\"<>|\\\\",
"]"
].join("");
const ILLEGAL_CHAR_REGEX = new RegExp(illegalFileNameCharacters, "g");
var OS = require("resource://gre/modules/osfile.jsm").OS;
var Sqlite = require("resource://gre/modules/Sqlite.jsm");
/**
* An async method equivalent to setTimeout but using Promises
*
* @param {number} time
* The wait time in milliseconds.
*/
function sleep(time) {
let deferred = promise.defer();
setTimeout(() => {
deferred.resolve(null);
}, time);
return deferred.promise;
}
var indexedDBHelpers = {
/**
* Fetches all the databases and their metadata for the given `host`.
*/
getDBNamesForHost: async(function*(host) {
let sanitizedHost = indexedDBHelpers.getSanitizedHost(host);
let directory = OS.Path.join(OS.Constants.Path.profileDir, "storage",
"default", sanitizedHost, "idb");
let exists = yield OS.File.exists(directory);
if (!exists && host.startsWith("about:")) {
// try for moz-safe-about directory
sanitizedHost = indexedDBHelpers.getSanitizedHost("moz-safe-" + host);
directory = OS.Path.join(OS.Constants.Path.profileDir, "storage",
"permanent", sanitizedHost, "idb");
exists = yield OS.File.exists(directory);
}
if (!exists) {
return [];
}
let names = [];
let dirIterator = new OS.File.DirectoryIterator(directory);
try {
yield dirIterator.forEach(file => {
// Skip directories.
if (file.isDir) {
return null;
}
// Skip any non-sqlite files.
if (!file.name.endsWith(".sqlite")) {
return null;
}
return indexedDBHelpers.getNameFromDatabaseFile(file.path).then(name => {
if (name) {
names.push(name);
}
return null;
});
});
} finally {
dirIterator.close();
}
return names;
}),
/**
* Removes any illegal characters from the host name to make it a valid file
* name.
*/
getSanitizedHost: function(host) {
return host.replace(ILLEGAL_CHAR_REGEX, "+");
},
/**
* Retrieves the proper indexed db database name from the provided .sqlite
* file location.
*/
getNameFromDatabaseFile: async(function*(path) {
let connection = null;
let retryCount = 0;
// Content pages might be having an open transaction for the same indexed db
// which this sqlite file belongs to. In that case, sqlite.openConnection
// will throw. Thus we retry for some time to see if the lock is removed.
while (!connection && retryCount++ < 25) {
try {
connection = yield Sqlite.openConnection({ path: path });
} catch (ex) {
// Continuously retrying is overkill. Waiting for 100ms before next try
yield sleep(100);
}
}
if (!connection) {
return null;
}
let rows = yield connection.execute("SELECT name FROM database");
if (rows.length != 1) {
return null;
}
let name = rows[0].getResultByName("name");
yield connection.close();
return name;
})
};
module.exports = indexedDBHelpers.getDBNamesForHost;
If anyone wants to use this, here is how you would use it:
var getDBNamesForHost = require("./getDBNamesForHost");
getDBNamesForHost("http://example.com").then(names => {
console.log(names);
});
I think it would be cool if someone were to build an add-on that adds indexedDB.mozGetDatabaseNames to work the same way as indexedDB.webkitGetDatabaseNames. I'm not doing that... I'll leave it up to you if you want. It would be a great dev tool to have ;)

ng-file-upload with multiple file upload and progress bar

I want to upload multiple files to a Rails server and show a separate progress bar for each file. I am using ng-file-upload to upload the files. The files are uploaded, but it shows progress only for the last file and not for the others. I am attaching my code; please help.
Angular controller code:
$scope.upload_file = function() {
var files = $scope.files;
var uploadUrl = base_url+"/upload_image";
var job_id = $scope.directive.id;
if(current_user.role == "third_party_vendor" || current_user.role == "employee")
{
var source = current_user.user_login+"-"+current_user.role;
}
else
{
var source = $scope.dataObj.source["user_login"]+"-"+$scope.dataObj.source["role"];
}
if(job_id === "" || job_id === undefined || files === undefined || files === ""){
error_notification("Please select a job and a file.");
return;
}
hideLoader();
$("#upload_resume_queue").modal('show');
var formData = new Array();
formData['job_id'] = job_id;
formData['context'] = "inside_page";
formData['source'] = source;
for (var i = 0; i < files.length; i++) {
var file = files[i];
console.log(file.name);
$scope.upload = $upload.upload({
url: uploadUrl,
data:{myObj: formData},
file: file,
}).progress(function(evt) {
//console.log('percent: ' +parseInt(100.0 * evt.loaded / evt.total));
file.progress = Math.round(evt.loaded * 100 / evt.total)
}).success(function(responseText) {
hideLoader();
try{
var response = responseText;
}catch(e){
error_notification("Invalid Excel file Imported.");
return;
}
if(response.status==='wrong_content_type')
error_notification("Please upload a valid file format.",0);
if(response.status==='job_application_present'){
$scope.duplicate = true;
$scope.jobID = job_id;
$scope.user_id = response.user_id;
$scope.application_id = response.application_id;
//showModal('#duplicate_application_modal');
error_notification("Job Application already present for this user and job.",0);
}
if(response.status==='invalid_email')
error_notification("The email in the resume is an invalid one.",0);
if(response.status==='success')
success_notification("The uploaded resume has been parsed.",0);
});
}
};
Html code:
<input type="file" class="required file_browse" ng-file-select="" ng-model="files" multiple />
I am not able to test the following, but I think I found what is wrong.
In JavaScript, variables are scoped to functions. So, in your for loop you change the value of file in the var file = files[i]; line. At the end, after the for loop has finished, the value of file is the last file.
At some point the .progress event is fired by ng-file-upload to notify you about progress (for one of the files). You update the status, but since file now holds the last file, not the one you expected, the last file's status is updated.
That's why only the last file is updated. To solve this, you need to access the correct file for each progress event. You can do that by keeping the file variable in scope using an anonymous function:
for (var i = 0; i < files.length; i++) {
(function(file) {
console.log(file.name);
$scope.upload = $upload.upload({
url: uploadUrl,
data:{myObj: formData},
file: file,
}).progress(function(evt) {
//console.log('percent: ' +parseInt(100.0 * evt.loaded / evt.total));
file.progress = Math.round(evt.loaded * 100 / evt.total)
}).success(function(responseText) {
// the other stuff
});
})(files[i])
}
In your code there may be other problems related to variable scope and the JavaScript event loop. For more information please take a look at this.
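As an aside, a minimal alternative sketch that achieves the same per-file scoping without the extra wrapper call, assuming files is a plain array (a raw FileList would need Array.prototype.slice.call(files) first) and reusing uploadUrl and formData from the question:
files.forEach(function(file) {
    // each callback closes over its own `file`, so progress updates hit the right one
    $scope.upload = $upload.upload({
        url: uploadUrl,
        data: { myObj: formData },
        file: file
    }).progress(function(evt) {
        file.progress = Math.round(evt.loaded * 100 / evt.total);
    }).success(function(responseText) {
        // handle the response as in the original code
    });
});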

How to add Content Security Policy to Firefox extension

I have a plugin which I have to support on both Chrome and Firefox. The plugin does cross-site script loading.
In Chrome, by adding the content security policy to my manifest.json file, I could get away with it. How can I do it in a Firefox extension?
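(For reference, the Chrome side of this is the content_security_policy entry in manifest.json, roughly along these lines; the exact sources depend on what the plugin loads:)
{
  "content_security_policy": "script-src 'self' https://cdn.example.com; object-src 'self'"
}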
I couldn't find a simple solution for my problem, and after looking at some Firefox extensions I came up with my own solution, below. It was tested on FF 24.0 but should work on other versions as well.
Cc["@mozilla.org/observer-service;1"].getService(Ci.nsIObserverService)
.addObserver(_httpExamineCallback, "http-on-examine-response", false);
function _httpExamineCallback(aSubject, aTopic, aData) {
var httpChannel = aSubject.QueryInterface(Ci.nsIHttpChannel);
if (httpChannel.responseStatus !== 200) {
return;
}
var cspRules;
var mycsp;
// there is no clean way to check for the presence of the CSP header. An exception
// will be thrown if it is not there.
// https://developer.mozilla.org/en-US/docs/XPCOM_Interface_Reference/nsIHttpChannel
try {
cspRules = httpChannel.getResponseHeader("Content-Security-Policy");
mycsp = _getCspAppendingMyHostDirective(cspRules);
httpChannel.setResponseHeader('Content-Security-Policy', mycsp, false);
} catch (e) {
try {
// Fallback mechanism support
cspRules = httpChannel.getResponseHeader("X-Content-Security-Policy");
mycsp = _getCspAppendingMyHostDirective(cspRules);
httpChannel.setResponseHeader('X-Content-Security-Policy', mycsp, false);
} catch (e) {
// no csp headers defined
return;
}
}
};
/**
* @var cspRules : content security policy
* For my requirement I have to append a rule just to the 'script-src' directive, but you can
* modify this function to your needs.
*
*/
function _getCspAppendingMyHostDirective(cspRules) {
var rules = cspRules.split(';'),
scriptSrcDefined = false,
defaultSrcIndex = -1;
for (var ii = 0; ii < rules.length; ii++) {
if ( rules[ii].toLowerCase().indexOf('script-src') != -1 ) {
rules[ii] = rules[ii] + ' <My CSP Rule gets appended here>';
scriptSrcDefined = true;
}
if (rules[ii].toLowerCase().indexOf('default-src') != -1) {
defaultSrcIndex = ii;
}
}
// a few publishers will put everything in the default (default-src) directive,
// without defining script-src. We need to modify those as well.
if ((!scriptSrcDefined) && (defaultSrcIndex != -1)) {
rules[defaultSrcIndex] = rules[defaultSrcIndex] + ' <My CSP rule gets appended here>';
}
return rules.join(';');
};
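A hypothetical usage sketch, assuming the "<My CSP Rule gets appended here>" placeholder in the function above has been replaced with a real source such as https://my.cdn.example:
var csp = "default-src 'self'; script-src 'self' https://apis.google.com";
console.log(_getCspAppendingMyHostDirective(csp));
// -> "default-src 'self'; script-src 'self' https://apis.google.com https://my.cdn.example"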
There are plans in the future to add content policy natively in the SDK (bug 852297), but there is a 3rd party module that should get you close to where you want to be: policy.js
