Gulp: paths.scss.forEach is not a function - foreach

I have written the following gulp task:
var paths = require('./package.json').paths;
var watching = false;
gulp.task('scss', function () {
    var processors = [
        autoprefixer({
            browsers: ['last 2 versions', '> 0.5%', 'ie >= 8', 'ie_mob >= 10', 'ff >= 30', 'chrome >= 34', 'ios >= 7', 'android >= 4']
        }),
        cssnano({
            autoprefixer: false,
            safe: true
        })
    ];
    var streams = merge();
    var development = (gutil.env.type === 'dev10');
    paths.scss.forEach(function (path) {
        streams.add(gulp.src(path.scss.src + '**/*.scss')
            .pipe(sourcemaps.init())
            .pipe(sass())
            .pipe(postcss(processors))
            .pipe(sourcemaps.write('./'))
            .pipe(gulp.dest(path.dest)));
    });
    return streams;
});
const watch = gulp.parallel('js', 'scss');
exports.watch = watch;
Where js is another task, that is running without any error. Currently I am stuck at making this scss task work, as I am constantly receiving an error saying paths.scss.forEach is not a function. I have written the following in my package.json file that is placed in the same directory as of my gulpfile.js:
"paths": {
"js": {
"src": "../client/js/",
"dest": "../static/default/js/"
},
"scss": {
"src": "../client/scss/",
"dest": "../static/default/css/"
}
}
This is the stack trace I get in CMD:
[16:38:08] Starting 'watch'...
[16:38:08] Starting 'js'...
[16:38:08] Starting 'scss'...
[16:38:08] 'scss' errored after 84 ms
[16:38:08] TypeError: paths.scss.forEach is not a function
at D:\XYZ\gulpfile.js:92:13
at taskWrapper (D:\XYZ\node_modules\undertaker\lib\set-task.js:13:15)
at bound (domain.js:280:14)
at runBound (domain.js:293:12)
at asyncRunner (D:\XYZ\node_modules\async-done\index.js:55:18)
at _combinedTickCallback (internal/process/next_tick.js:73:7)
at process._tickCallback (internal/process/next_tick.js:104:9)
[16:38:08] 'watch' errored after 91 ms
[16:38:08] The following tasks did not complete: js
[16:38:08] Did you forget to signal async completion?
What should I do in this case? I am stuck at the moment, without any ideas on how to proceed.

Your paths.scss is an object; forEach is an array method. So use a for...in loop instead:
gulp.task('scss', function () {
    // paths.scss.forEach(function (path) {
    for (var path in paths.scss) {
        console.log(path); // src, dest
        streams.add(gulp.src(path + '**/*.scss'));
        // streams.add(gulp.src(path.scss.src + '**/*.scss')
        //     .pipe(sourcemaps.init())
        //     .pipe(sass())
        //     .pipe(postcss(processors))
        //     .pipe(sourcemaps.write('./'))
        //     .pipe(gulp.dest(path.dest)));
    }
    // return streams;
});
But you only want paths.scss.src anyway: you don't want to run your sass pipeline over paths.scss.dest, which is what the loop would end up doing. Get rid of the for loop altogether:
gulp.src(paths.scss.src + '**/*.scss')
    .pipe(sourcemaps.init())
    .pipe(sass())
    .pipe(postcss(processors))
    .pipe(sourcemaps.write('./'))
    .pipe(gulp.dest(paths.scss.dest));
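Putting it together, a minimal sketch of the whole corrected task (assuming the same plugins and the paths object from your package.json, and returning the stream so gulp knows when the task has finished):

gulp.task('scss', function () {
    var processors = [
        autoprefixer({
            browsers: ['last 2 versions', '> 0.5%', 'ie >= 8', 'ie_mob >= 10', 'ff >= 30', 'chrome >= 34', 'ios >= 7', 'android >= 4']
        }),
        cssnano({ autoprefixer: false, safe: true })
    ];
    // paths.scss.src and paths.scss.dest are plain strings, so no loop is needed
    return gulp.src(paths.scss.src + '**/*.scss')
        .pipe(sourcemaps.init())
        .pipe(sass())
        .pipe(postcss(processors))
        .pipe(sourcemaps.write('./'))
        .pipe(gulp.dest(paths.scss.dest));
});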

Related

RxJS Observable reset timeout

Is it possible to reset/increase the timeout of an Observable after another timeout has already been set? In the following example the timeout of 5 should be overridden with a timeout of 9999, but this does not work:
var source = Rx.Observable
    .return(42)
    .delay(1000)
    .timeout(5)
    .timeout(9999); // this statement should override the previously set timeout of 5 ms, but actually it does not

var subscription = source.subscribe(
    function (x) {
        console.log('Next: ' + x);
    },
    function (err) {
        console.log('Error: ' + err);
    },
    function () {
        console.log('Completed');
    });
Are there any possibilities to override the already set timeout?
Short answer: As far as I know, there is no "legit" solution to this.
Hacky answer: You could connect to the source of the timed-out stream and set your own timeout; the example below shows how this could be done.
However, I would not advise you to do that in any serious project - I'm sure there should be another solution to your problem.
var base = Rx.Observable
    .return(42)
    .delay(1000)
    .timeout(1);

var patched = base.source.timeout(2000);

var subscription = patched.subscribe(
    function (x) {
        console.log('Patched Next: ' + x);
    },
    function (err) {
        console.log('Patched Error: ' + err);
    },
    function () {
        console.log('Patched Completed');
    });

var subscription = base.subscribe(
    function (x) {
        console.log('Base Next: ' + x);
    },
    function (err) {
        console.log('Base Error: ' + err);
    },
    function () {
        console.log('Base Completed');
    });

<script src="https://cdnjs.cloudflare.com/ajax/libs/rxjs/4.1.0/rx.all.js"></script>
What if you use a subject to represent your timeout value?
timeoutSubject = new Rx.ReplaySubject(1);

timeoutSubject
    .asObservable()
    .switchMap((v) => source.timeout(v))
    .subscribe((r) => console.log(r));

timeoutSubject.next(5);
timeoutSubject.next(9999);
switchMap should take care of the unsubscribing/resubscribing for each of the timeout values.
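For completeness, here is a self-contained sketch of that idea against RxJS 4 (the version loaded in the question's snippet); note that in RxJS 4 the switchMap operator is called flatMapLatest and subjects emit with onNext:

var source = Rx.Observable
    .return(42)
    .delay(1000);

var timeoutSubject = new Rx.ReplaySubject(1);

timeoutSubject
    .asObservable()
    // every new timeout value drops the previous inner subscription
    // and re-subscribes to source with the new timeout
    .flatMapLatest(function (ms) {
        return source.timeout(ms);
    })
    .subscribe(
        function (x) { console.log('Next: ' + x); },
        function (err) { console.log('Error: ' + err); }
    );

timeoutSubject.onNext(5);    // on its own this would error after 5 ms ...
timeoutSubject.onNext(9999); // ... but it is replaced right away, so 42 arrives after the 1000 ms delay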

Cordova - write to and read from File

I am running into a problem with my cordova app where after an app update, localStorage is cleared. I am now attempting to save some basic user data to a text file instead, using cordova's FileWriter and FileReader.
At some point Cordova was updated and these methods now require a plugin to read and write files: https://cordova.apache.org/docs/en/latest/reference/cordova-plugin-file/index.html
Because my live iOS application uses the old version of cordova, it must write with FileWriter. My app update (not live) uses a newer version of cordova and therefore must use the plugin.
------ Possible workaround found, see update below ------
When trying to read the file, I am seeing the following error in xcode:
2017-01-26 17:58:52.990 MyApp[40355:2892997] ERROR: Method 'hello there' not defined in Plugin 'File'
2017-01-26 17:58:52.991 MyApp[40355:2892997] -[CDVCommandQueue executePending] [Line 142] FAILED pluginJSON = ["File1875336264","File","hello there",["cdvfile://localhost/persistent/player_data.txt",null,null]]
Note: I'm running the following code in Safari console while the iOS simulator is running, just for convenience
My code for writing the file
(function() {
    var onFail = function(err) {
        alert('write action failed!');
        alert(err.code);
    };
    var onGotFS = function(fileSystem) {
        alert( 'gotFS' );
        fileSystem.root.getFile("player_data.txt", {create: true}, onGotFileEntry, onFail);
    };
    var onGotFileEntry = function(fileEntry) {
        alert( 'gotFileEntry, path: ' + fileEntry.fullPath );
        fileEntry.createWriter(onGotFileWriter, onFail);
    };
    var onGotFileWriter = function(writer) {
        alert( 'gotFileWriter' );
        writer.onwrite = onFileWritten;
        writer.onerror = onFail;
        var data = "hello there";
        alert( 'writing data: ' + data );
        writer.write( data );
    };
    var onFileWritten = function(evt) {
        alert( "saveTokenToFile SUCCESS!" );
    };
    // start process of looking up file system and writing
    alert( 'requesting file system ...' );
    window.requestFileSystem(LocalFileSystem.PERSISTENT, 1024, onGotFS, onFail);
})();
My code for reading the file:
(function() {
    // error handlers declared first so they are defined when passed below
    var onErrorReadFile = function() {
        console.log("error reading file");
    };
    var onErrorLoadFs = function() {
        console.log("request file system has failed to load.");
    };
    window.requestFileSystem(LocalFileSystem.PERSISTENT, 0, function (fs) {
        fs.root.getFile("player_data.txt", { create: true }, function (fileEntry) {
            fileEntry.file(function (file) {
                var reader = new FileReader();
                // take the event argument so evt.target.result is defined
                reader.onloadend = function(evt) {
                    alert("Success");
                    console.log(evt.target.result);
                };
                reader.onerror = function() {
                    console.log("reader error");
                };
                reader.readAsText(file);
            }, onErrorReadFile);
        });
    }, onErrorLoadFs);
})();
UPDATE
In case anyone else runs into this, I did find a way to read the saved file. The fileEntry object has a URL path to the saved file. To access the data in the file, I'm passing that URL to jQuery.getJSON, which gives us back readable JSON.
readDataFromFile: function(){
    // error handlers declared first so they are defined when passed below
    var onErrorLoadFs = function() {
        console.log("request file system has failed to load.");
    };
    var onErrorGetFile = function() {
        console.log("requested file can not be read or does not exist");
    };
    window.requestFileSystem(LocalFileSystem.PERSISTENT, 0, function (fs) {
        fs.root.getFile("player_data.txt", {}, function (fileEntry) {
            var entryURL = fileEntry.nativeURL;
            jQuery.getJSON(entryURL, function(data) {
                console.log(data);
            }).fail(function(){
                console.log("file found, but contents were empty");
            });
        }, onErrorGetFile);
    }, onErrorLoadFs);
}
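One caveat with this workaround: jQuery.getJSON only succeeds when the file's contents parse as valid JSON, so a plain string like "hello there" would end up in the .fail branch. A small sketch of the writer side that stores JSON instead (playerData here is just a placeholder object):

var onGotFileWriter = function (writer) {
    writer.onwrite = function () { alert('save SUCCESS!'); };
    writer.onerror = onFail;
    // placeholder data - anything JSON-serialisable works
    var playerData = { token: 'hello there' };
    // store it as JSON so jQuery.getJSON can parse it back later
    writer.write(JSON.stringify(playerData));
};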

Looking for a complete tutorial about process to update an electron app installed

I have read lots of things about this subject, but I can't find complete documentation.
I managed to use electron-packager and electron-winstaller to get a setup.exe for my electron application.
I used electron-release-server to create a server that hosts the releases of my electron app.
I added this piece of code to my electron app:
const autoUpdater = electron.autoUpdater;
var feedUrl = 'http://10.61.32.53:1337//download/:' + app.getVersion();
autoUpdater.setFeedURL(feedUrl);
// event handling after download new release
autoUpdater.on('update-downloaded', function (event, releaseNotes, releaseName, releaseDate, updateUrl, quitAndUpdate) {
    // confirm install or not to user
    var index = dialog.showMessageBox(mainWindow, {
        type: 'info',
        buttons: [i18n.__('Restart'), i18n.__('Later')],
        title: "Typetalk",
        message: i18n.__('The new version has been downloaded. Please restart the application to apply the updates.'),
        detail: releaseName + "\n\n" + releaseNotes
    });
    if (index === 1) {
        return;
    }
    // restart app, then update will be applied
    quitAndUpdate();
});
But when I install my application, I get an error.
In fact, I think I don't fully understand what to do, both on the client side and on the server side. Any help would be very appreciated!
Thanks in advance
I used the following in my version and it works (except the tray icon):
app.on('ready', () => {
    console.warn("Starting Autoupdater")
    console.warn(app.getVersion())
    var feedUrl = 'http://ls-desktop.herokuapp.com/update/' + os.platform() + '/' + app.getVersion() + '/';
    autoUpdater.setFeedURL(feedUrl);
    tray = new Tray(__dirname + '/LS.png')
    console.log(__dirname + '/LS.png')
    console.log('created');
    autoUpdater.on('checking-for-update', function() {
        tray.displayBalloon({
            title: 'Autoupdater',
            content: 'Checking for Update!'
        })
    });
    autoUpdater.on('update-available', function() {
        console.log("update-available");
    });
    autoUpdater.on('update-not-available', function() {
        tray.displayBalloon({
            title: 'Autoupdater',
            content: 'No Updates availible!'
        })
    });
    autoUpdater.on('update-downloaded', function() {
        console.log(" update-downloaded");
    });
    setTimeout(function() {autoUpdater.checkForUpdates()}, 10000);
    autoUpdater.on('update-downloaded', function (event, releaseNotes, releaseName, releaseDate, updateUrl, quitAndUpdate) {
        var index = dialog.showMessageBox({
            type: 'info',
            buttons: ['Restart', 'Later'],
            title: "Lornsenschule Vertretungsplan",
            message: ('The new version has been downloaded. Please restart the application to apply the updates.'),
            detail: releaseName + "\n\n" + releaseNotes
        });
        if (index === 1) {
            return;
        }
        quitAndUpdate()
    });
})
Note the setTimeout(function() {autoUpdater.checkForUpdates()}, 10000); that is the real workaround that I used. The rest is just a nice addition, I think.
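Since the actual install error isn't shown in the question, it may also help to subscribe to the autoUpdater's error event so the failure is at least logged; a minimal sketch:

// Surface whatever the autoUpdater reports instead of failing silently
autoUpdater.on('error', function (error) {
    console.error('autoUpdater error:', error);
    dialog.showMessageBox({
        type: 'error',
        buttons: ['OK'],
        title: 'Update error',
        message: String(error)
    });
});

Also, the feed URL in the question ('http://10.61.32.53:1337//download/:' + app.getVersion()) contains a double slash and a literal colon before the version, which is worth double-checking against the URL format electron-release-server expects.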

Phonegap iOS Download files from online to local file system not working

Using PhoneGap Build for the iOS platform, the download procedure is not working!
Could someone tell me why the code below returns these error messages:
"download error source " + error.source: it could not download the image from, e.g., http://www.sushikoapp.com/img/branches/thumbs/big_sushiko-bchamoun.jpg
"upload error code" + error.code: sometimes 1 and sometimes 3
The code is below:
var ios_directoryEntry = fileSystem.root;
ios_directoryEntry.getDirectory("branches", { create: true, exclusive: false }, onDirectorySuccessiOS, onDirectoryFailiOS);
var ios_rootdir = fileSystem.root;
//var ios_fp = ios_rootdir.fullPath;
var ios_fp = ios_rootdir.toURL();
ios_fp = ios_fp + "branches/";
//ios_fp = "cdvfile://localhost/persistent/branches/";
var fileTransfer = new FileTransfer();
fileTransfer.download(encodeURI(imgURL + "branches/thumbs/big_sushiko-bchamoun.jpg"), ios_fp + "big_big_sushiko-bchamoun.jpg",
    function (entry) {
        alert("download complete: " + entry.fullPath);
    },
    function (error) {
        //Download abort errors or download failed errors
        alert("download error source " + error.source);
        alert("upload error code" + error.code);
    }
);
Thank you for your suggestion...
The problem is you're treating the file API operations as synchronous, when they're actually asynchronous.
You can use cordova.file to reference the target folder on the filesystem (see here: https://github.com/apache/cordova-plugin-file/blob/master/doc/index.md).
So try something like this:
resolveLocalFileSystemURL(cordova.file.documentsDirectory, onGetDocuments, function(error) {
    console.error("Error getting Documents directory: " + error.code);
});

function onGetDocuments(entry) {
    console.log("Got Documents directory");
    entry.getDirectory("branches", { create: true, exclusive: false }, onGetBranches, function(error) {
        console.error("Error creating/getting branches directory: " + error.code);
    });
}

function onGetBranches(entry) {
    console.log("Got branches directory");
    doFileTransfer(entry.toURL());
}

function doFileTransfer(ios_fp) {
    var fileTransfer = new FileTransfer();
    fileTransfer.download(encodeURI(imgURL + "branches/thumbs/big_sushiko-bchamoun.jpg"), ios_fp + "big_big_sushiko-bchamoun.jpg",
        function (entry) {
            alert("download complete: " + entry.fullPath);
        },
        function (error) {
            //Download abort errors or download failed errors
            alert("download error source " + error.source);
            alert("upload error code" + error.code);
        }
    );
}
You are trying to write a file to the iOS root file system; this is not possible since all apps on iOS are sandboxed and can only access their own sandbox.
So don't use fileSystem.root but fileSystem.documents.

Grunt / PhoneGap : Warning: stdout maxBuffer exceeded

Quite randomly I get the message Warning: stdout maxBuffer exceeded when I use my grunt script to launch the iOS simulator.
Any idea what could trigger this?
Here is the part of my Gruntfile where it occurs:
grunt.registerTask('emulator', 'Launch an emulator', function (platform, targetId) {
    if (arguments.length === 0 || !platform) {
        grunt.fail.fatal('No platform was specified');
    } else {
        grunt.log.writeln('We launch the emulator for ' + platform);
        if (targetId) {
            grunt.log.writeln('Specified targetId: ' + targetId);
        }
        var fs = require('fs'), execInstance, directoryPath, command, done = this.async();
        if (platform === 'ios') {
            directoryPath = phoneGapBuildPath + '/platforms/' + platform.toLowerCase() + '/cordova/';
            command = './run';
        } else {
            directoryPath = phoneGapBuildPath + '/platforms/' + platform.toLowerCase() + '/cordova/';
            command = 'node run ' + (targetId ? '--target=' + targetId : '--debug');
        }
        if (!fs.existsSync(directoryPath)) {
            grunt.fail.fatal('You need to launch the compile phase');
            return;
        }
        grunt.log.writeln('We run the following command: ' + command);
        execInstance = require('child_process').exec(command, {
            cwd : directoryPath
        }, function (err) {
            done(err);
        });
        execInstance.stdout.on('data', function (data) {
            grunt.log.writeln(data);
        });
        execInstance.stderr.on('data', function (data) {
            grunt.log.error(data);
        });
    }
});
You can use an option to effectively disable maxBuffer (this example uses the grunt-shell task config):
shell: {
    yourCommand: {
        command: [
            'command to execute'
        ],
        options: {
            execOptions: {
                maxBuffer: Infinity
            }
        }
    }
}
When you spawn the child process, try bumping maxBuffer in your config object:
execInstance = require('child_process').exec(command, {
    cwd : directoryPath,
    maxBuffer: 500 * 1024
}, function (err) {
    done(err);
});
According to the docs, the default maxBuffer size is 200 * 1024 (http://nodejs.org/api/child_process.html#child_process_child_process_exec_command_options_callback)
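If raising maxBuffer only postpones the problem, another option is to switch from exec to spawn, which streams stdout/stderr instead of buffering them, so there is no maxBuffer limit at all. A sketch that reuses the command, directoryPath and done variables from the Gruntfile above:

// spawn streams output instead of buffering it, so maxBuffer does not apply
var spawn = require('child_process').spawn;
var parts = command.split(' ');
execInstance = spawn(parts[0], parts.slice(1), { cwd: directoryPath });
execInstance.stdout.on('data', function (data) {
    grunt.log.writeln(data.toString());
});
execInstance.stderr.on('data', function (data) {
    grunt.log.error(data.toString());
});
execInstance.on('close', function (code) {
    done(code === 0 ? null : new Error(command + ' exited with code ' + code));
});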
