I am trying to get a local ddev setup working with the stack:
Apache
PHP 8.1
Laravel 9
Vite
Soketi for WebSockets
I am confused about how to configure ports in ddev, and about which host/port I should use on the frontend.
.ddev/config.yaml
name: laravel-vite-inertia
type: laravel
docroot: public
php_version: "8.1"
webserver_type: apache-fpm
router_http_port: "80"
router_https_port: "443"
xdebug_enabled: false
additional_hostnames: []
additional_fqdns: []
database:
type: mysql
version: "8.0"
nfs_mount_enabled: false
mutagen_enabled: false
use_dns_when_possible: true
composer_version: "2"
web_environment: []
nodejs_version: "16"
.ddev/docker-compose.vite.yaml
# Override the web container's standard HTTP_EXPOSE and HTTPS_EXPOSE services
# to expose port 3000 of DDEV's web container.
version: '3.6'
services:
web:
# ports are a list of exposed *container* ports
ports:
- "3000"
- "6001:6001"
environment:
- HTTP_EXPOSE=${DDEV_ROUTER_HTTP_PORT}:80,${DDEV_MAILHOG_PORT}:8025,3001:3000
- HTTPS_EXPOSE=${DDEV_ROUTER_HTTPS_PORT}:80,${DDEV_MAILHOG_HTTPS_PORT}:8025,3000:3000
/resources/js/app.js
import Echo from 'laravel-echo';
import Pusher from 'pusher-js';

// laravel-echo's "pusher" broadcaster resolves the Pusher constructor from
// window.Pusher unless an explicit `client` option is passed, so the import
// must be registered globally — a bare lowercase import is never used.
window.Pusher = Pusher;

let laravelEcho = new Echo({
    broadcaster: 'pusher',
    key: 'app-key',
    wsHost: '127.0.0.1', // <- I assume this is the error?
    wsPort: 6001,
    wssPort: 6001,
    forceTLS: false,
    encrypted: true,
    disableStats: true,
    enabledTransports: ['ws', 'wss'],
});

// Subscribe to the public "auctions" channel and log broadcast events.
laravelEcho.channel(`auctions`)
    .listen('AuctionIndexVisited', (e) => {
        console.log('AuctionIndexVisited', e);
    });
laravel .env
#...
PUSHER_HOST=127.0.0.1
PUSHER_PORT=6001
PUSHER_APP_KEY="app-key"
PUSHER_APP_ID="app-id"
PUSHER_APP_SECRET="app-secret"
MIX_PUSHER_APP_KEY="${PUSHER_APP_KEY}"
MIX_PUSHER_HOST="${PUSHER_HOST}"
MIX_PUSHER_PORT="${PUSHER_PORT}"
Laravel successfully broadcasts to soketi.
The frontend is not able to connect to the websocket...
Fix: Do not use port 6000.
As it turns out, port 6000 is considered "unsafe" by browsers because it is the default port for X11.
So I switched to port 9000.
Vite -> Port 3000
Soketi -> Port 9000
My working setup:
.ddev/docker-compose.vite.yaml
# Override the web container's standard HTTP_EXPOSE and HTTPS_EXPOSE services
# to expose port 3000 of DDEV's web container.
version: '3.6'
services:
web:
# ports are a list of exposed *container* ports
ports:
- "3000"
- "9000"
environment:
- HTTP_EXPOSE=${DDEV_ROUTER_HTTP_PORT}:80,${DDEV_MAILHOG_PORT}:8025,3001:3000,9001:9000
- HTTPS_EXPOSE=${DDEV_ROUTER_HTTPS_PORT}:80,${DDEV_MAILHOG_HTTPS_PORT}:8025,3000:3000,9000:9000
vite.config.js
import path from "path";
// Fixed: scoped package names were garbled ("#vitejs/..." is not a valid
// npm package path; the scope prefix is "@").
import vue from '@vitejs/plugin-vue';
import vuetify from '@vuetify/vite-plugin';

// Vite config: dev server on 0.0.0.0:3000 (published through ddev),
// production build emitted to public/dist with a manifest for Blade.
export default ({command}) => ({
    // Assets come from the dev-server root during `vite serve`,
    // from /dist/ in a production build.
    base: command === 'serve' ? '' : '/dist/',
    publicDir: path.resolve(__dirname, 'resources/copy-to-public'),
    server: {
        host: '0.0.0.0', // bind all interfaces so the container port can be forwarded
        port: 3000,
    },
    plugins: [
        vue(),
        // https://github.com/vuetifyjs/vuetify-loader/tree/next/packages/vite-plugin
        vuetify({
            autoImport: true,
        }),
    ],
    optimizeDeps: {
        include: [
            "@inertiajs/inertia",
            "@inertiajs/inertia-vue3",
            "axios",
            "vue",
        ],
    },
    build: {
        manifest: true, // emit manifest.json so the Blade template can resolve hashed filenames
        outDir: path.resolve(__dirname, 'public/dist'),
        rollupOptions: {
            input: '/resources/js/app.js',
        }
    },
    resolve: {
        alias: {
            '@': path.resolve(__dirname, 'resources/js'),
        },
        extensions: [
            '.js',
            '.json',
            '.jsx',
            '.mjs',
            '.ts',
            '.tsx',
            '.vue',
        ]
    },
});
Add localhost:3000 to sanctum config.
/config/sanctum.php
'stateful' => explode(',', env('SANCTUM_STATEFUL_DOMAINS', sprintf(
'%s%s',
'localhost,localhost:3000,127.0.0.1,127.0.0.1:8000,::1',
Sanctum::currentApplicationUrlWithPort()
))),
soketi.config.json
-> Caveat: do not run soketi from inside your project root dir! It will read the Laravel .env and not work as expected.
{
"debug": true,
"ip": "0.0.0.0",
"address": "0.0.0.0",
"host": "0.0.0.0",
"port": 9000,
"appManager.array.apps": [
{
"id": "app-id",
"key": "app-key",
"secret": "app-secret",
"webhooks": [
]
}
]
}
I am using soketi as pusher replacement.
Part of my app.js:
import Echo from 'laravel-echo';
import Pusher from 'pusher-js';

// laravel-echo looks for the Pusher constructor on window.Pusher when the
// "pusher" broadcaster is used, so register the import globally — otherwise
// the client never initializes and the WebSocket connection is not attempted.
window.Pusher = Pusher;

window.laravelEcho = new Echo({
    broadcaster: 'pusher',
    key: "app-key",
    wsHost: "mysite.ddev.site",
    wsPort: 9000,
    wssPort: 9000,
    forceTLS: false,
    encrypted: true,
    disableStats: true,
    enabledTransports: ['ws', 'wss'],
});
/resources/views/app.blade.php
<!doctype html>
<html>
<head>
<title>Laravel, Vue, Inertia and Vite</title>
<meta name="viewport" content="width=device-width,initial-scale=1" />
<meta charset="UTF-8" />
@include('partials/favicon')
<link rel="stylesheet" href="/dist/fonts/materialdesignicons.min.css" />
@production
@php
$manifest = json_decode(file_get_contents(
public_path('dist/manifest.json')
), JSON_OBJECT_AS_ARRAY);
@endphp
<script type="module" src="/dist/{{ $manifest['resources/js/app.js']['file'] }}"></script>
<link rel="stylesheet" href="/dist/{{ $manifest['resources/js/app.js']['css'][0] }}" />
@else
<script type="module" src="{{ env('APP_URL') }}:3000/@vite/client"></script>
<script type="module" src="{{ env('APP_URL') }}:3000/resources/js/app.js"></script>
@endproduction
<meta name="csrf-token" content="{{ csrf_token() }}">
@routes
</head>
<body>
@inertia
</body>
</html>
.env
# Toggle between "production" and "local" for vite
# "production" == vite build
# "local" == vite dev
APP_ENV=local
APP_DEBUG=true
# ....
BROADCAST_DRIVER=pusher
# ....
# Pusher config
PUSHER_HOST=127.0.0.1
PUSHER_APP_HOST=127.0.0.1
PUSHER_PORT=9000
PUSHER_APP_KEY="app-key"
PUSHER_APP_ID="app-id"
PUSHER_APP_SECRET="app-secret"
for vite dev to work.
Part of my package.json
"scripts": {
"dev": "vite serve",
"soketi": "cd .. && soketi start --config=html/soketi-config.json",
"prod": "vite build",
"build": "vite build"
},
Related
I have 3 containers, one for the DB, one for the php/apache and the last one for webpack.
My website is a WordPress blog. I don't usually use Docker, but I wanted to try it. So, there is my webpack config that reads my CSS (Sass) and JS and compiles them — it works. But I want to add BrowserSync for auto reload, and it's not working the way I want. localhost:3000 works (my home page), but whenever I go to another page (localhost:3000/whatever) it changes my URL to php:8080/whatever.
Here is my docker-compose
php:
build: .docker
volumes:
- ./.docker/conf/php/php.ini:/usr/local/etc/php/conf.d/php.ini
- .:/var/www/app
ports:
- "80:80"
depends_on:
- db
user: www-data
db:
image: mysql:5
ports:
- "3307:3306"
environment:
MYSQL_ALLOW_EMPTY_PASSWORD: 'yes'
webpack:
build: .docker/webpack
volumes:
- ./:/usr/src/app
ports:
- "3000:3000"
- "3001:3001"
environment:
- LOCAL_DOMAIN=php/
command: sh -c "npm install && npm run dev-server"
depends_on:
- php
My DockerFile (Webpack)
FROM node:16
WORKDIR /usr/src/app
EXPOSE 3000
EXPOSE 3001
# Port pour le devserver de Webpack
EXPOSE 8080
The webpack config
// Proxy target for BrowserSync; falls back to the local vhost when the
// container does not provide LOCAL_DOMAIN.
const localDomain = process.env.LOCAL_DOMAIN ? process.env.LOCAL_DOMAIN : 'http://local.websitename.com/';

let baseConfig = {
    externals: {
        // jQuery is expected to be provided globally (e.g. by WordPress); don't bundle it.
        "jquery": "jQuery"
    },
    plugins: [
        new MiniCssExtractPlugin({
            filename: '[name].css',
        }),
        new BrowserSyncPlugin({
            // Proxy the backend so BrowserSync serves the rendered pages.
            proxy: localDomain,
            files: [outputPath + '/*.css', themePath + '**/*.php', outputPath + '/*.js'],
            injectCss: true,
        }, {
            reload: false,
            injectCss: true
        }),
        new CleanWebpackPlugin(),
    ],
    module: {
        rules: [{
            // CSS SASS SCSS
            test: /\.(css|sass|scss)$/,
            use: [
                MiniCssExtractPlugin.loader,
                {
                    loader: 'css-loader',
                    options: {
                        importLoaders: 2,
                        sourceMap: true,
                    },
                },
                {
                    loader: 'postcss-loader'
                },
                {
                    loader: 'sass-loader',
                    options: {
                        sourceMap: true,
                    },
                },
            ],
        },
        {
            test: /\.js$/,
            exclude: /node_modules/,
            use: ["babel-loader"]
        },
        {
            test: /\.(jpg|jpeg|png|gif|woff|woff2|eot|ttf)$/i,
            type: 'asset/resource'
        },
        {
            // Fixed: "#svgr/webpack" is a garbled scoped package name — the
            // npm scope prefix is "@".
            test: /\.svg$/,
            use: ['@svgr/webpack', 'url-loader?limit=1024'],
        },
        ]
    }
}
I tried multiple things, like the following, but it's not working.
new BrowserSyncPlugin({
host: 'localhost',
port: 3000,
proxy: 'http://localhost:8080/'
},
Can you help me, please ?
(For Info, localhost is working well, I can navigate on my website)
I would greatly appreciate your effort and time spent solving the unresponsive hot-reload function when trying to run a Vue.js app in a Docker container, using Docker Engine on Windows 10 with WSL2 active. Please take a look at the configurations below:
Vue.Setup.Dockerfile
FROM node:17-alpine
EXPOSE 8080
WORKDIR /app/frontend
RUN npm --force install -g @vue/cli@4.5.15
COPY /frontend /app/frontend
ENV PATH /app/frontend/node_modules/.bin:$PATH
CMD [ "npm", "run", "serve" ]
docker-compose.yml
version: "3.8"
services:
vue:
build:
context: .
dockerfile: dockerfiles/Vue.Setup.Dockerfile
restart: always
ports:
- "127.0.0.1:8080:8080"
container_name: vue_ui
volumes:
- ./frontend/:/app/frontend/
- /app/frontend/node_modules
environment:
- CHOKIDAR_USEPOLLING=true
vue.config.js
// vue.config.js — Vue CLI config for running the dev server inside Docker
// (Windows 10 + WSL2) while the backend serves the built assets in production.
module.exports = {
// In production assets are served by the backend from /static/dist/;
// in development they come straight from the dev server.
publicPath:
process.env.NODE_ENV === "production"
? "/static/dist/"
: "http://127.0.0.1:8080",
pages: {
index: {
entry: 'src/main.js',
template: 'public/index.html',
filename: 'index.html',
title: 'QuestionTime',
chunks: ['chunk-vendors', 'chunk-common', 'index']
},
},
// Webpack configuration
devServer: {
// Bind all interfaces so the port can be published from the container.
host: "0.0.0.0",
port: "8080",
hot: true,
headers: {"Access-Control-Allow-Origin": "*"},
devMiddleware: {
publicPath: "http://127.0.0.1:8080",
// Persist index.html to disk so the backend can read it from the bind mount.
writeToDisk: (filePath) => filePath.endsWith("index.html"),
},
static: {
watch: {
ignored: "/node_modules/",
// Polling is needed for change detection on WSL2/Docker bind mounts
// (inotify events don't cross the mount) — pairs with CHOKIDAR_USEPOLLING.
usePolling: true,
},
},
client: {
webSocketURL: {
/* You need to config this option, otherwise the below error will occur
in your browser console when trying to connect to development server
from another Docker container:
WebSocket connection to 'ws://127.0.0.1:<port-number>/ws' failed
*/
hostname: "0.0.0.0",
pathname: "/ws",
port: 8080,
},
},
},
};
Note: When run the command:
docker-compose up
The below message will show:
It seems you are running Vue CLI inside a container.
Since you are using a non-root publicPath, the hot-reload socket
will not be able to infer the correct URL to connect. You should
explicitly specify the URL via devServer.public.
Access the dev server via http://localhost:<your container's
external mapped port>
FYI: the option:
devServer.public
is no longer available in Vue/cli#4 or later versions.
WORKAROUND
solution
Thanks,
I just Dockerised my first NuxtJS app using Docker and Docker-compose.
All run smoothly except that when I make a change on my local, the running docker does not reflect the changes I've made.
How do I configure the Docker container to listen to local changes in my code? Thanks:
Dockerfile:
# Dockerfile
FROM node:11.13.0-alpine
# create destination directory
RUN mkdir /myapp
WORKDIR /myapp
# Add current directory code to working directory
ADD . /myapp/
# update and install dependency
RUN apk update && apk upgrade
RUN apk add git
RUN npm install
EXPOSE 3000
ENV NUXT_HOST=0.0.0.0
ENV NUXT_PORT=3000
# NOTE(review): gunicorn is a Python WSGI server and is not installed in this
# Node image, so this CMD cannot work; docker-compose overrides it with "npm run dev".
CMD gunicorn myapp.wsgi:application --bind $NUXT_HOST:$NUXT_PORT
Docker-compose:
version: "3"
services:
nuxt:
build: .
command: npm run dev
ports:
- "3000:3000"
environment:
- NUXT_HOST=0.0.0.0
- NUXT_PORT=3000
volumes:
- /myapp/
Nuxt.config.js:
// nuxt.config.js — module names were garbled ("#nuxtjs/..."): the npm scope
// prefix is "@", so the scoped modules are restored below.
export default {
  // Global page headers (https://go.nuxtjs.dev/config-head)
  head: {
    title: 'myapp',
    meta: [
      { charset: 'utf-8' },
      { name: 'viewport', content: 'width=device-width, initial-scale=1' },
      { hid: 'description', name: 'description', content: '' },
    ],
    link: [{ rel: 'icon', type: 'image/x-icon', href: '/favicon.ico' }],
  },
  // Global CSS (https://go.nuxtjs.dev/config-css)
  css: [],
  // Plugins to run before rendering page (https://go.nuxtjs.dev/config-plugins)
  plugins: [],
  // Auto import components (https://go.nuxtjs.dev/config-components)
  components: true,
  // Modules for dev and build (recommended) (https://go.nuxtjs.dev/config-modules)
  buildModules: [
    // https://go.nuxtjs.dev/eslint
    '@nuxtjs/eslint-module',
    // https://go.nuxtjs.dev/stylelint
    '@nuxtjs/stylelint-module',
  ],
  // Modules (https://go.nuxtjs.dev/config-modules)
  modules: [
    // https://go.nuxtjs.dev/buefy
    'nuxt-buefy',
    // https://go.nuxtjs.dev/axios
    '@nuxtjs/axios',
    // https://go.nuxtjs.dev/pwa
    '@nuxtjs/pwa',
    // https://go.nuxtjs.dev/content
    '@nuxt/content',
  ],
  // Axios module configuration (https://go.nuxtjs.dev/config-axios)
  axios: {},
  // Content module configuration (https://go.nuxtjs.dev/config-content)
  content: {},
  // Build Configuration (https://go.nuxtjs.dev/config-build)
  build: {},
  // Poll for file changes so hot reload works on Docker bind mounts.
  watchers: {
    webpack: {
      poll: true
    }
  },
}
I have setup docker config using docker compose.
this is part of docker compose file
version: '3'
networks:
pm:
services:
consul:
container_name: consul
image: consul:latest
restart: unless-stopped
ports:
- 8300:8300
- 8301:8301
- 8302:8302
- 8400:8400
- 8500:8500
- 8600:8600
environment:
CONSUL_LOCAL_CONFIG: >-
{
"bootstrap": true,
"server": true,
"node_name": "consul1",
"bind_addr": "0.0.0.0",
"client_addr": "0.0.0.0",
"bootstrap_expect": 1,
"ui": true,
"addresses" : {
"http" : "0.0.0.0"
},
"ports": {
"http": 8500
},
"log_level": "DEBUG",
"connect" : {
"enabled" : true
}
}
volumes:
- ./data:/consul/data
command: agent -server -bind 0.0.0.0 -client 0.0.0.0 -bootstrap-expect=1
Then set the key value via browser
I would like to add the key/value as initial at new environment, so that additional setup steps at browser could be avoided.
this is the configuration i export by using consul kv command:
# consul kv export config/
[
{
"key": "config/",
"flags": 0,
"value": ""
},
{
"key": "config/drug2/",
"flags": 0,
"value": ""
},
{
"key": "config/drug2/data",
"flags": 0,
"value": "e30="
}
]
To my knowledge Docker Compose does not have a way to run a custom command/script after the containers have started.
As a workaround you could write a shell script which executes docker-compose up and then either runs consul kv import or a curl command against Consul's Transaction API to add the data you're trying to load.
Learning docker and docker-compose, running into a stickler:
Here is my docker-compose.yml file:
version: '3'
services:
site:
build:
context: "."
dockerfile: "Dockerfile-site-dev"
environment:
- "HTTPS_METHOD=noredirect"
volumes:
- "./site/:/usr/share/nginx/html"
ports:
- "8080:80"
app:
build:
context: "."
dockerfile: "Dockerfile-server-dev"
environment:
- "HTTPS_METHOD=noredirect"
volumes:
- "./app/:/app/"
ports:
- "3000:3000"
This instantiates an nginx web front-end and a nodejs/express back-end in alpine images.
The issue is when I trigger a 'get' to 'http://app:3000/service' from a webpage to the nodejs app container, it redirects to https (which fails because I don't have https set up on the container -- this is an adhoc internal test & http only is fine.)
Have tried with jquery $.get and axios.get -- same results.
I can exec into the 'site' container and ping 'app' just fine, and if I curl 'http://app:3000/testme' from the 'site' container (which just returns an "I'm Here!!!" response) it works just fine.
But something is forcing the 307 when I execute it from a page.
I'm not seeing any redirects in the nginx configuration (which would only affect page accesses, anyway), and there is nothing in my nodejs app code to trigger a redirect.
Seems like something in docker is forcing the redirect.
Note in the docker-compose file I've set the environment "HTTPS_METHOD=noredirect" on both containers, which doesn't seem to have any effect.
Any insights appreciated.
Adding:
Dockerfile-site-dev:
FROM nginx:stable-alpine
WORKDIR /usr/share/nginx/html
Dockerfile-app-dev:
FROM node:10.13-alpine
WORKDIR /app
RUN yarn global add nodemon
CMD ["sh","-c", "yarn install && nodemon ./index.js"]
On the site side, the nginx config is vanilla from the alpine base image.
Here is the index.html that triggers requests to the app server:
<!DOCTYPE html>
<html>
<head>
<title>Vote SSE Demo</title>
<style>
body {font-family:Arial, Helvetica, sans-serif;
text-align:center;}
#yes,#no,#message {font-size: 1.5em;}
button {border-radius:0.5em;background-color:lightgray;}
</style>
</head>
<body>
<h1>Will "Hello, World!" bring world peace?</h1>
<div><button id="yes">Yes</button></div><br>
<div><button id="no">No</button></div>
<br/><br/>
<div id="message"></div>
</body>
<script src="./jquery-2.1.4.min.js"></script>
<script src="./axios.min.js"></script>
<script>
// Sends the yes/no vote to the Express app, then swaps the buttons for a
// thank-you message.
// NOTE(review): "app" is the docker-compose service hostname; this script
// runs in the browser, OUTSIDE the Docker network, where "app" does not
// resolve — use the host-published address (e.g. http://localhost:3000)
// instead, as the answer at the end of this thread explains.
function vote(yes) {
console.log('voting: ' + (yes? 'yes' : 'no'));
axios.get("http://app:3000/vote?yes=" + yes)
.then(function(rsp) {
$("#yes").hide();
$("#no").hide();
$("#message").text("Thank you for your vote!");
})
.catch(function(err) {
console.log('Axios get error: ' + err);
});
}
// Wire both buttons to the vote() helper.
$("#yes").on("click", function(){
vote(true);
});
$("#no").on("click", function() {
vote(false);
});
</script>
</html>
For the app side, here is the package.json file:
{
"name": "callbacks",
"version": "1.0.0",
"main": "index.js",
"license": "MIT",
"dependencies": {
"express": "^4.16.4"
}
}
And here is the index.js:
const express = require('express');
const sse = require('./sse');

const app = express();

// All currently-open SSE connections and the running vote tally.
let connections = [],
    votes = {yes: 0, no: 0};

app.use(sse);

// Record a yes/no vote and broadcast the updated tally to every subscriber.
app.get('/vote', function(req, res) {
    console.log('server in vote...');
    let i;
    if (req.query.yes === 'true')
        votes.yes++;
    else
        votes.no++;
    // BUG FIX: the loop condition referenced the undeclared variable `ix`
    // (`ix < connections.length`), which throws a ReferenceError, so
    // subscribers never received tally updates and the request 500'd.
    for (i = 0; i < connections.length; ++i) {
        connections[i].sseSend(votes);
    }
    res.sendStatus(200);
});

// Register a new SSE subscriber and immediately send it the current tally.
app.get('/stream', function(req, res) {
    res.sseSetup();
    res.sseSend(votes);
    connections.push(res);
});

// Plain-HTTP liveness check (curl-able from the other containers).
app.get('/testme', function(req, res) {
    console.log('server: in testme');
    res.send("I'm here!!!\n");
})

app.listen(3000);
console.log('Listening on port 3000');
SSE is ServerEvent, which is what I'm trying to test.
Otherwise, all pretty basic.
I do think docker is suspect, somehow, by virtue of the fact that I can curl the app from the site container no issues (or maybe that is why it's not suspect...)
The html runs on the browser (outside of docker).
The browser is not aware of any "app" host.
So in your html, replace:
axios.get("http://app:3000/vote?yes=" + yes)
with
axios.get("http://localhost:3000/vote?yes=" + yes)