I figured that if the devtools can list all created IndexedDB databases, there should be an API to retrieve them...?
Does anyone know how I can get a list of database names with the help of the Firefox SDK?
I did dig into the code and looked at the source. Unfortunately there wasn't any convenient API that would pull out all the databases from one host.
The way the devtools do it is to look around in the user's profile folder for .sqlite files and run a SQL query against each one (multiple times, in case there is an ongoing transaction) to ask for the database name.
It came down to this piece of code:
// stripped-down version of: https://dxr.mozilla.org/mozilla-central/source/devtools/server/actors/storage.js
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
"use strict";
const {async} = require("resource://gre/modules/devtools/async-utils");
const { setTimeout } = require("sdk/timers");
const promise = require("sdk/core/promise");
// A RegExp for characters that cannot appear in a file/directory name. This is
// used to sanitize the host name for indexed db to lookup whether the file is
// present in <profileDir>/storage/default/ location
const illegalFileNameCharacters = [
"[",
// Control characters \001 to \036
"\\x00-\\x24",
// Special characters
"/:*?\\\"<>|\\\\",
"]"
].join("");
const ILLEGAL_CHAR_REGEX = new RegExp(illegalFileNameCharacters, "g");
var OS = require("resource://gre/modules/osfile.jsm").OS;
var Sqlite = require("resource://gre/modules/Sqlite.jsm");
/**
* An async method equivalent to setTimeout but using Promises
*
* @param {number} time
* The wait time in milliseconds.
*/
function sleep(time) {
let deferred = promise.defer();
setTimeout(() => {
deferred.resolve(null);
}, time);
return deferred.promise;
}
var indexedDBHelpers = {
/**
* Fetches all the databases and their metadata for the given `host`.
*/
getDBNamesForHost: async(function*(host) {
let sanitizedHost = indexedDBHelpers.getSanitizedHost(host);
let directory = OS.Path.join(OS.Constants.Path.profileDir, "storage",
"default", sanitizedHost, "idb");
let exists = yield OS.File.exists(directory);
if (!exists && host.startsWith("about:")) {
// try for moz-safe-about directory
sanitizedHost = indexedDBHelpers.getSanitizedHost("moz-safe-" + host);
directory = OS.Path.join(OS.Constants.Path.profileDir, "storage",
"permanent", sanitizedHost, "idb");
exists = yield OS.File.exists(directory);
}
if (!exists) {
return [];
}
let names = [];
let dirIterator = new OS.File.DirectoryIterator(directory);
try {
yield dirIterator.forEach(file => {
// Skip directories.
if (file.isDir) {
return null;
}
// Skip any non-sqlite files.
if (!file.name.endsWith(".sqlite")) {
return null;
}
return indexedDBHelpers.getNameFromDatabaseFile(file.path).then(name => {
if (name) {
names.push(name);
}
return null;
});
});
} finally {
dirIterator.close();
}
return names;
}),
/**
* Removes any illegal characters from the host name to make it a valid file
* name.
*/
getSanitizedHost: function(host) {
return host.replace(ILLEGAL_CHAR_REGEX, "+");
},
/**
* Retrieves the proper indexed db database name from the provided .sqlite
* file location.
*/
getNameFromDatabaseFile: async(function*(path) {
let connection = null;
let retryCount = 0;
// Content pages might be having an open transaction for the same indexed db
// which this sqlite file belongs to. In that case, sqlite.openConnection
// will throw. Thus we retry for some time to see if the lock is removed.
while (!connection && retryCount++ < 25) {
try {
connection = yield Sqlite.openConnection({ path: path });
} catch (ex) {
// Continuously retrying is overkill. Waiting for 100ms before next try
yield sleep(100);
}
}
if (!connection) {
return null;
}
let rows = yield connection.execute("SELECT name FROM database");
if (rows.length != 1) {
return null;
}
let name = rows[0].getResultByName("name");
yield connection.close();
return name;
})
};
module.exports = indexedDBHelpers.getDBNamesForHost;
If anyone wants to use this, here is how you would use it:
var getDBNamesForHost = require("./getDBNamesForHost");
getDBNamesForHost("http://example.com").then(names => {
console.log(names);
});
I think it would be cool if someone were to build an add-on that adds indexedDB.mozGetDatabaseNames, working the same way as indexedDB.webkitGetDatabaseNames. I'm not doing that... I will leave it up to you if you want to. It would be a great dev tool to have ;)
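If someone does want to try it, below is only a rough sketch (untested, and not from the original post) of the add-on side using the SDK's page-mod module; "./expose-db-names.js" is a hypothetical content script that would still have to decide how to expose the result to the page (e.g. relay it over worker.port):
var { PageMod } = require("sdk/page-mod");
var { URL } = require("sdk/url");
var getDBNamesForHost = require("./getDBNamesForHost");

PageMod({
  include: "*",
  contentScriptFile: "./expose-db-names.js", // hypothetical content script (not shown)
  onAttach: function (worker) {
    worker.port.on("get-db-names", function () {
      // Rebuild the origin ("scheme://host[:port]") that getDBNamesForHost expects,
      // matching the "http://example.com" form used in the usage example above.
      var url = new URL(worker.url);
      var host = url.scheme + "://" + url.host + (url.port ? ":" + url.port : "");
      getDBNamesForHost(host).then(function (names) {
        worker.port.emit("db-names", names);
      });
    });
  }
});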
I want to add files to a list and then access them in a for loop. This is how I try to do it:
private get_app_list () {
var file = new File.new_for_path (/usr/share/applications);
List<File> app_list = new List<File> ();
foreach (File desktop_file in app_list) {
// other code here
}
}
What is the right way to access the files stored in a directory and then add them to a list?
using Posix;
...
List<File> app_list = new List<File> ();
// Open the directory. Returns null on error.
var dirHandle = Posix.opendir ("/usr/share/applications");
unowned DirEnt entry;
// While there is an entry to read in the directory
while ((entry = readdir (dirHandle)) != null) {
// Get the name
var name = (string) entry.d_name;
// Skip the "." and ".." entries
if (name == "." || name == "..") {
continue;
}
// And append a new File to app_list (note the "/" separator)
app_list.append (File.new_for_path ("/usr/share/applications/" + name));
}
If you merely want to display the available apps on the system, you could use the utilities supplied by the gio-2.0 library. After adding dependency ('gio-2.0') to your meson.build file, you could use code similar to the following:
/* We use a `GListStore` here, which is a simple array-like list implementation
* for manual management.
* List models need to know what type of data they provide, so we need to
* provide the type here. As we want to do a list of applications, `GAppInfo`
* is the object we provide.
*/
var app_list = new GLib.ListStore (typeof (GLib.AppInfo));
var apps = GLib.AppInfo.get_all ();
foreach (var app in apps) {
app_list.append (app);
}
If however you need to list the files inside a directory, it is also possible to use the higher-level API provided by the same gio-2.0 library. Here is sample code to enumerate the files inside "/usr/share/applications/":
void main () {
var app_dir = GLib.File.new_for_path ("/usr/share/applications");
try {
var cancellable = new Cancellable ();
GLib.FileEnumerator enumerator = app_dir.enumerate_children (
GLib.FileAttribute.STANDARD_DISPLAY_NAME,
GLib.FileQueryInfoFlags.NOFOLLOW_SYMLINKS,
cancellable
);
FileInfo ? file_info = null;
while (!cancellable.is_cancelled () &&
((file_info = enumerator.next_file (cancellable)) != null)) {
// Ignore directories
if (file_info.get_file_type () == GLib.FileType.DIRECTORY) {
continue;
}
// files could be added to a list_store here.
/*
* var files_list = new GLib.ListStore (typeof (GLib.FileInfo));
* files_list.append (file_info);
*/
print (file_info.get_display_name () + "\n");
}
} catch (GLib.Error err) {
info ("%s\n", err.message);
}
}
I hope this helps.
I'm trying to recover the history of a single asset. The model is defined as follows:
namespace org.example.basic
asset SampleAsset identified by assetId {
o String assetId
--> SampleParticipant owner
o String value
}
participant SampleParticipant identified by participantId {
o String participantId
o String firstName
o String lastName
}
transaction GetAssetHistory {
o String assetId
}
event SampleEvent {
--> SampleAsset asset
o String oldValue
o String newValue
}
I generate a single participant and a new asset referencing that participant, and then I update the asset's value field. Reading about asset updates, I found the following:
async function getAssetHistory(tx) {
//How can I get a single asset history using the tx.assetId value??
let historian = await businessNetworkConnection.getHistorian();
let historianRecords = historian.getAll();
console.log(prettyoutput(historianRecords));
}
When I deploy the bna and I call the function I get the following:
(screenshot of the historian records output)
In other functions I use the Runtime API, but I don't know whether businessNetworkConnection is a Runtime API call.
Any idea how I can get the history of a single asset?
Is there any example on the internet?
UPDATE
I changed the way I recover a particular asset's history, as follows:
In the .js file:
/**
* Sample read-only transaction
* @param {org.example.trading.MyPartHistory} tx
* @returns {org.example.trading.Trader[]} All trxns
* @transaction
*/
async function participantHistory(tx) {
console.log('1');
const partId = tx.tradeId;
console.log('2');
const nativeSupport = tx.nativeSupport;
// const partRegistry = await getParticipantRegistry('org.example.trading.Trader')
console.log('3');
const nativeKey = getNativeAPI().createCompositeKey('Asset:org.example.trading.Trader', [partId]);
console.log('4');
const iterator = await getNativeAPI().getHistoryForKey(nativeKey);
let results = [];
let res = {done : false};
while (!res.done) {
res = await iterator.next();
if (res && res.value && res.value.value) {
let val = res.value.value.toString('utf8');
if (val.length > 0) {
console.log("#debug val is " + val );
results.push(JSON.parse(val));
}
}
if (res && res.done) {
try {
iterator.close();
}
catch (err) {
}
}
}
var newArray = [];
for (const item of results) {
newArray.push(getSerializer().fromJSON(item));
}
console.log("#debug the results to be returned are as follows: ");
return newArray; // returns something to my NodeJS client (called via REST API)
}
In model file
@commit(false)
@returns(Trader[])
transaction MyPartHistory {
o String tradeId
}
I create a single asset and then update it with other values. But when I call MyPartHistory I get the following message:
Error: Native API not available in web runtime
Use of the native API is only available when you are running your business network in a real Fabric environment. You can't use it in the online Playground environment. You will have to set up a real Fabric environment and then run Playground locally, connecting to that Fabric, in order to test your business network.
We are facing this issue with our app, only in the Safari browser, and especially on iOS devices.
Current behavior
Not sure if this is a known issue (I tried searching but found nothing). Safari for Mac appears to be silently dropping web socket connections due to inactivity/idle if the page/tab is not in focus.
The biggest issue is that on mobile iOS it is very persistent.
Steps to reproduce
Open Safari > the website loads > put Safari in idle by opening any other application or locking the device.
On wake-up, Safari closes the connection and the data is not displayed anymore; we get infinite loading in the modules where we request the data.
Expected behavior
WebSockets should be kept alive via the heartbeat functionality. We are not seeing this behavior in other browsers, so it is unlikely to be the code.
Is this possibly some sort of power-saving feature that is overriding/ignoring the heartbeats?
import 'whatwg-fetch';
import Config from "../config/main";
import WS from "./websocket";
import Helpers from "./helperFunctions";
var Zergling = (function (WS, Config) {
'use strict';
var Zergling = {};
var subscriptions = {}, useWebSocket = false, sessionRequestIsInProgress = false, loginInProgress = false,
uiLogggedIn = false, // uiLogggedIn is the login state displayed in UI (sometimes it differs from real one, see delayedLogoutIfNotRestored func)
authData, session, connectionAvailable, isLoggedIn, longPollUrl;
Zergling.loginStates = {
LOGGED_OUT: 0,
LOGGED_IN: 1,
IN_PROGRESS: 2
};
Zergling.codes = { // Swarm response codes
OK: 0,
SESSION_LOST: 5,
NEED_TO_LOGIN: 12
};
function getLanguageCode (lng) {
if (Config.swarm.languageMap && Config.swarm.languageMap[lng]) {
return Config.swarm.languageMap[lng];
}
return lng;
}
//helper func for fetch
function checkStatus (response) {
if (response.status >= 200 && response.status < 300) {
return response;
} else {
var error = new Error(response.statusText);
error.response = response;
throw error;
}
}
//helper func for fetch
function parseJSON (response) {
return response.json();
}
/**
* @description returns randomly selected (taking weight into consideration) long poll url
* @returns {String} long polling URL
*/
function getLongPollUrl () {
if (!longPollUrl) {
longPollUrl = Helpers.getWeightedRandom(Config.swarm.url).url;
console.debug('long Polling URL selected:', longPollUrl);
}
return longPollUrl;
}
/**
* @description
* Applies the diff on object
* properties having null values in diff are removed from object, others' values are replaced.
*
* Also checks the 'price' field for changes and adds new field 'price_change' as sibling
* which indicates the change direction (1 - up, -1 down, null - unchanged)
*
* @param {Object} current current object
* @param {Object} diff received diff
*/
function destructivelyUpdateObject (current, diff) {
if (current === undefined || !(current instanceof Object)) {
throw new Error('wrong call');
}
for (var key in diff) {
if (!diff.hasOwnProperty(key)) continue;
var val = diff[key];
if (val === null) {
delete current[key];
} else if (typeof val !== 'object') {
current[key] = val;
} else { // diff[key] is Object
if (typeof current[key] !== 'object' || current[key] === null) {
current[key] = val;
} else {
var hasPrice = (current[key].price !== undefined);
var oldPrice;
if (hasPrice) {
oldPrice = current[key].price;
}
destructivelyUpdateObject(current[key], val);
if (hasPrice) {
current[key].price_change = (val.price === oldPrice) ? null : (oldPrice < val.price) * 2 - 1;
}
}
}
}
}
This is an iOS feature that protects users against code draining their battery...
Push notifications for background applications should be performed using iOS's push notification system rather than by keeping an open connection alive.
There are hacks around this limitation, but the truth is that the limitation is good for the users and shouldn't be circumvented.
Read the technical note in the link for more details.
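If the goal is simply to recover once the tab becomes active again (rather than to keep the socket alive in the background), a common mitigation is to reconnect on visibilitychange. A minimal sketch, not from the original answer; WS_URL and the resubscribe step are placeholders for whatever the app actually uses:
var WS_URL = 'wss://example.com/swarm'; // hypothetical endpoint
var socket = null;

function connect() {
    socket = new WebSocket(WS_URL);
    socket.onopen = function () {
        // Re-issue subscriptions here so the UI stops showing the infinite loader.
    };
}

document.addEventListener('visibilitychange', function () {
    // When Safari wakes the page, the old socket is often already dead.
    if (document.visibilityState === 'visible' &&
        (!socket || socket.readyState === WebSocket.CLOSED)) {
        connect();
    }
});

connect();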
From the API page, I gather there's no function for what I'm trying to do. I want to read text from a file storing it as a list of strings, manipulate the text, and save the file. The first part is easy using the function:
abstract List<String> readAsLinesSync([Encoding encoding = Encoding.UTF_8])
However, there is no function that lets me write the contents of the list directly to the file, e.g.
abstract void writeAsLinesSync(List<String> contents, [Encoding encoding = Encoding.UTF_8, FileMode mode = FileMode.WRITE])
Instead, I've been using:
abstract void writeAsStringSync(String contents, [Encoding encoding = Encoding.UTF_8, FileMode mode = FileMode.WRITE])
by reducing the list to a single string. I'm sure I could also use a for loop and feed to a stream line by line. I was wondering two things:
Is there a way to just hand the file a list of strings for writing?
Why is there a readAsLinesSync but no writeAsLinesSync? Is this an oversight or a design decision?
Thanks
I just made my own export class that handles writing to a file or sending the data to a websocket.
Usage:
exportToWeb(mapOrList, 'local', 8080);
exportToFile(mapOrList, 'local/data/data.txt');
Class:
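(The original post does not show the imports. For the Dart of that era, presumably something like the following, plus whatever library defines Complex:)
// Assumed imports, not part of the original answer:
// import 'dart:io';
// import 'dart:collection';
// import 'dart:json' as json; // pre-dart:convert API providing json.parse / json.stringify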
//Save data to a file.
void exportToFile(var data, String filename) =>
new _Export(data).toFile(filename);
//Send data to a websocket.
void exportToWeb(var data, String host, int port) =>
new _Export(data).toWeb(host, port);
class _Export {
HashMap mapData;
List listData;
bool isMap = false;
bool isComplex = false;
_Export(var data) {
// Check whether the input is a Map or a List data structure.
if (data is HashMap) {
isMap = true;
mapData = data;
} else if (data is List) {
listData = data;
if (data.every((element) => element is Complex)) {
isComplex = true;
}
} else {
throw new ArgumentError("input data is not valid.");
}
}
// Save to a file using an IOSink. Handles Map, List and List<Complex>.
void toFile(String filename) {
List<String> tokens = filename.split(new RegExp(r'\.(?=[^.]+$)'));
if (tokens.length == 1) tokens.add('txt');
if (isMap) {
mapData.forEach((k, v) {
File fileHandle = new File('${tokens[0]}_k$k.${tokens[1]}');
IOSink dataFile = fileHandle.openWrite();
for (var i = 0; i < mapData[k].length; i++) {
dataFile.write('${mapData[k][i].real}\t'
'${mapData[k][i].imag}\n');
}
dataFile.close();
});
} else {
File fileHandle = new File('${tokens[0]}_data.${tokens[1]}');
IOSink dataFile = fileHandle.openWrite();
if (isComplex) {
for (var i = 0; i < listData.length; i++) {
listData[i] = listData[i].cround2;
dataFile.write("${listData[i].real}\t${listData[i].imag}\n");
}
} else {
for (var i = 0; i < listData.length; i++) {
dataFile.write('${listData[i]}\n');
}
}
dataFile.close();
}
}
// Set up a websocket to send data to a client.
void toWeb(String host, int port) {
//connect with ws://localhost:8080/ws
//for echo - http://www.websocket.org/echo.html
if (host == 'local') host = '127.0.0.1';
HttpServer.bind(host, port).then((server) {
server.transform(new WebSocketTransformer()).listen((WebSocket webSocket) {
webSocket.listen((message) {
var msg = json.parse(message);
print("Received the following message: \n"
"${msg["request"]}\n${msg["date"]}");
if (isMap) {
webSocket.send(json.stringify(mapData));
} else {
if (isComplex) {
List real = new List(listData.length);
List imag = new List(listData.length);
for (var i = 0; i < listData.length; i++) {
listData[i] = listData[i].cround2;
real[i] = listData[i].real;
imag[i] = listData[i].imag;
}
webSocket.send(json.stringify({"real": real, "imag": imag}));
} else {
webSocket.send(json.stringify({"real": listData, "imag": null}));
}
}
},
onDone: () {
print('Connection closed by client: Status - ${webSocket.closeCode}'
' : Reason - ${webSocket.closeReason}');
server.close();
});
});
});
}
}
I asked Mads Ager about this. He works on the io module. He said that he decided not to add writeAsLines because he didn't find it useful: for one thing, it is trivial to write the for loop, and for another, you would have to parameterize it with the kind of line separator you want to use. He said he can add it if there is a strong feeling that it would be valuable, but he didn't immediately see a lot of value in it.
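For completeness, here is a minimal sketch (mine, not from the thread) of such a helper, with the separator parameterized as he describes:
import 'dart:io';

// Writes each line followed by the chosen separator, mirroring what
// readAsLinesSync strips off when reading.
void writeAsLinesSync(File file, List<String> lines,
    {String separator = '\n'}) {
  file.writeAsStringSync(lines.map((line) => '$line$separator').join());
}

void main() {
  writeAsLinesSync(new File('out.txt'), ['first line', 'second line']);
}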
I'm trying to implement nicEdit with the nicUpload plugin, but when I select a file to upload it says "Failed to upload image", and the server response says "Invalid Upload ID".
This is the code that calls the script and initializes:
<script src="http://js.nicedit.com/nicEdit-latest.js" type="text/javascript"></script>
<script type="text/javascript">//<![CDATA[
bkLib.onDomLoaded(function() {
new nicEditor({uploadURI : '../../nicedit/nicUpload.php'}).panelInstance('area1');
});
//]]>
</script>
The path to nicUpload.php is correct, and the code is the one that can be found in the documentation: http://nicedit.com/src/nicUpload/nicUpload.js
I made the upload folder changes and set write permissions. According to the documentation (http://wiki.nicedit.com/w/page/515/Configuration%20Options), that's all, but I keep getting errors. Any ideas?
After looking for a solution for a long time (lots of posts without a real solution), I have now fixed the code myself. I'm now able to upload an image to my own server. Thanks to Firebug and Eclipse ;-)
The main problem is that nicUpload.php is old and does not work with the current nicEdit upload function.
Error handling is missing; feel free to add it...
Add the nicEditor to your PHP file and configure it to use nicUpload.php:
new nicEditor({iconsPath : 'pics/nicEditorIcons.gif', uploadURI : 'script/nicUpload.php'});
Download the uncompressed nicEdit.js and change the following lines in it:
uploadFile : function() {
var file = this.fileInput.files[0];
if (!file || !file.type.match(/image.*/)) {
this.onError("Only image files can be uploaded");
return;
}
this.fileInput.setStyle({ display: 'none' });
this.setProgress(0);
var fd = new FormData();
fd.append("image", file);
fd.append("key", "b7ea18a4ecbda8e92203fa4968d10660");
var xhr = new XMLHttpRequest();
xhr.open("POST", this.ne.options.uploadURI || this.nicURI);
xhr.onload = function() {
try {
var res = JSON.parse(xhr.responseText);
} catch(e) {
return this.onError();
}
//this.onUploaded(res.upload); // CHANGE HERE
this.onUploaded(res);
}.closure(this);
xhr.onerror = this.onError.closure(this);
xhr.upload.onprogress = function(e) {
this.setProgress(e.loaded / e.total);
}.closure(this);
xhr.send(fd);
},
onUploaded : function(options) {
this.removePane();
//var src = options.links.original; // CHANGE HERE
var src = options['url'];
if(!this.im) {
this.ne.selectedInstance.restoreRng();
//var tmp = 'javascript:nicImTemp();';
this.ne.nicCommand("insertImage", src);
this.im = this.findElm('IMG','src', src);
}
var w = parseInt(this.ne.selectedInstance.elm.getStyle('width'));
if(this.im) {
this.im.setAttributes({
src : src,
width : (w && options.image.width) ? Math.min(w, options.image.width) : ''
});
}
}
Change nicUpload.php like this:
<?php
/* NicEdit - Micro Inline WYSIWYG
* Copyright 2007-2009 Brian Kirchoff
*
* NicEdit is distributed under the terms of the MIT license
* For more information visit http://nicedit.com/
* Do not remove this copyright message
*
* nicUpload Receiver Script PHP Edition
* @description: Save images uploaded from a user's computer to a directory, and
* return the URL of the image to the client for use in nicEdit
* @author: Brian Kirchoff <briankircho@gmail.com>
* @sponsored by: DotConcepts (http://www.dotconcepts.net)
* @version: 0.9.0
*/
/*
* @author: Christoph Pahre
* @version: 0.1
* @description: various modifications so that this PHP file works with the newest nicEdit.js (which also needs modification - @see)
* @see http://stackoverflow.com/questions/11677128/nicupload-says-invalid-upload-id-cant-make-it-works
*/
define('NICUPLOAD_PATH', '../images/uploadedImages'); // Set the path (relative or absolute) to
// the directory to save image files
define('NICUPLOAD_URI', '../images/uploadedImages'); // Set the URL (relative or absolute) to
// the directory defined above
$nicupload_allowed_extensions = array('jpg','jpeg','png','gif','bmp');
if(!function_exists('json_encode')) {
die('{"error" : "Image upload host does not have the required dependicies (json_encode/decode)"}');
}
if($_SERVER['REQUEST_METHOD']=='POST') { // Upload is complete
$file = $_FILES['image'];
$image = $file['tmp_name'];
$id = $file['name'];
$max_upload_size = ini_max_upload_size();
if(!$file) {
nicupload_error('Must be less than '.bytes_to_readable($max_upload_size));
}
$ext = strtolower(substr(strrchr($file['name'], '.'), 1));
@$size = getimagesize($image);
if(!$size || !in_array($ext, $nicupload_allowed_extensions)) {
nicupload_error('Invalid image file, must be a valid image less than '.bytes_to_readable($max_upload_size));
}
$filename = $id;
$path = NICUPLOAD_PATH.'/'.$filename;
if(!move_uploaded_file($image, $path)) {
nicupload_error('Server error, failed to move file');
}
$status = array();
$status['done'] = 1;
$status['width'] = $size[0];
$rp = realpath($path);
$status['url'] = NICUPLOAD_URI ."/".$id;
nicupload_output($status, false);
exit;
}
// UTILITY FUNCTIONS
function nicupload_error($msg) {
echo nicupload_output(array('error' => $msg));
}
function nicupload_output($status, $showLoadingMsg = false) {
$script = json_encode($status);
$script = str_replace("\\/", '/', $script);
echo $script;
exit;
}
function ini_max_upload_size() {
$post_size = ini_get('post_max_size');
$upload_size = ini_get('upload_max_filesize');
if(!$post_size) $post_size = '8M';
if(!$upload_size) $upload_size = '2M';
return min( ini_bytes_from_string($post_size), ini_bytes_from_string($upload_size) );
}
function ini_bytes_from_string($val) {
$val = trim($val);
$last = strtolower($val[strlen($val)-1]);
switch($last) {
// The 'G' modifier is available since PHP 5.1.0
case 'g':
$val *= 1024;
case 'm':
$val *= 1024;
case 'k':
$val *= 1024;
}
return $val;
}
function bytes_to_readable( $bytes ) {
if ($bytes<=0)
return '0 Byte';
$convention=1000; //[1000->10^x|1024->2^x]
$s=array('B', 'kB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB');
$e=floor(log($bytes,$convention));
return round($bytes/pow($convention,$e),2).' '.$s[$e];
}
?>
You can manually pass an id to your script, e.g. nicUpload.php?id=introPicHeader, and it will become introPicHeader.jpg (or the appropriate extension) in the images folder you defined.
However, I have noticed that this script is broken and cannot access the configuration option uploadURI if it is specified directly in nicEdit.js during the nicEditorAdvancedButton.extend({. This causes access to a relatively pathed "Unknown" resource, causing an error.
The documentation implies otherwise, and the fact that the nicURI was specified here for imgur.com (maybe as a default) gave me the impression I could also add an uploadURI reference to the nicUpload.php script in a single place rather than on every editor instantiation.
Update
This works if you pass it during instantiation, which I guess does allow for easy dynamic id population, as in the sketch below.
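For example (a sketch; the path and id value are placeholders for whatever your setup uses):
bkLib.onDomLoaded(function() {
    new nicEditor({
        uploadURI : 'script/nicUpload.php?id=introPicHeader'
    }).panelInstance('area1');
});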
Unfortunately, nicUpload.php is riddled with errors and its output is not JSON. The editor expects to parse JSON, finds a script tag instead, and errors with unexpected token "<".
There are a raft of other errors, which I will attempt to identify:
In nicEdit.js
A.append("image") should be infact A.append("nicImage")
this.onUploaded(D.upload) should become this.onUploaded(D)
this.onUploaded(D) should be moved to within the try block, after var D=JSON.parse(C.responseText), to fix variable scope issues
B.image.width needs to become B.width
In nicUpload.php
JSON output is not formed correctly; comment out the HTML output and output just json_encode($status).
JSON output needs to return a key/value pair named links rather than url, although renaming var D=B.links to var D=B.url in nicEdit.js would also suffice as a fix.
Both the PHP and JavaScript code leave a lot to be desired; I get errors regularly and have been fixing them myself.