Next.js getServerSideProps is not working using Docker

I am fetching data from my backend server via SSR in Next.js (getServerSideProps) and rendering it on the page.
The problem: when I run npm run dev locally, getServerSideProps works fine, but when I run the app in a container via docker-compose, the API call inside getServerSideProps fails.
const ResultMenu: NextPage<Props> = ({ CategoryData }) => {
...
};
export default ResultMenu;
export const getServerSideProps: GetServerSideProps = async (context) => {
  try {
    const { menuId } = context.query;
    const response = await axios.get<CategoryType[]>(
      "http://localhost:8080/category/get/menu",
      {
        headers: {
          "Content-Type": "application/json",
          "Access-Control-Allow-Origin": "*",
        },
        params: {
          menuId: menuId,
        },
      }
    );
    const data = response.data;
    console.log(data);
    return {
      props: {
        CategoryData: data,
      },
    };
  } catch (err) {
    console.log(err);
    return {
      props: {},
    };
  }
};
Inside the frontend container, the following error appears:
Error: connect ECONNREFUSED 127.0.0.1:8080
at TCPConnectWrap.afterConnect [as oncomplete] (node:net:1195:16) {
errno: -111,
code: 'ECONNREFUSED',
syscall: 'connect',
address: '127.0.0.1',
port: 8080,
config: {
transitional: {
silentJSONParsing: true,
forcedJSONParsing: true,
clarifyTimeoutError: false
},
adapter: [Function: httpAdapter],
transformRequest: [ [Function: transformRequest] ],
transformResponse: [ [Function: transformResponse] ],
timeout: 0,
xsrfCookieName: 'XSRF-TOKEN',
xsrfHeaderName: 'X-XSRF-TOKEN',
maxContentLength: -1,
maxBodyLength: -1,
validateStatus: [Function: validateStatus],
headers: {
Accept: 'application/json, text/plain, */*',
'Content-Type': 'application/json',
'Access-Control-Allow-Origin': '*',
'User-Agent': 'axios/0.26.1'
},
params: { menuId: '1' },
method: 'get',
url: 'http://localhost:8080/category/get/menu',
data: undefined
},
request: <ref *1> Writable {
_writableState: WritableState {
objectMode: false,
highWaterMark: 16384,
finalCalled: false,
needDrain: false,
ending: false,
ended: false,
finished: false,
destroyed: false,
decodeStrings: true,
defaultEncoding: 'utf8',
length: 0,
writing: false,
corked: 0,
sync: true,
bufferProcessing: false,
onwrite: [Function: bound onwrite],
writecb: null,
writelen: 0,
afterWriteTickInfo: null,
buffered: [],
bufferedIndex: 0,
allBuffers: true,
allNoop: true,
pendingcb: 0,
constructed: true,
prefinished: false,
errorEmitted: false,
emitClose: true,
autoDestroy: true,
errored: null,
closed: false,
closeEmitted: false,
[Symbol(kOnFinished)]: []
},
_events: [Object: null prototype] {
response: [Function: handleResponse],
error: [Function: handleRequestError],
socket: [Function: handleRequestSocket]
},
_eventsCount: 3,
_maxListeners: undefined,
_options: {
maxRedirects: 21,
maxBodyLength: 10485760,
protocol: 'http:',
path: '/category/get/menu?menuId=1',
method: 'GET',
headers: [Object],
agent: undefined,
agents: [Object],
auth: undefined,
hostname: 'localhost',
port: '8080',
nativeProtocols: [Object],
pathname: '/category/get/menu',
search: '?menuId=1'
},
_ended: true,
_ending: true,
_redirectCount: 0,
_redirects: [],
_requestBodyLength: 0,
_requestBodyBuffers: [],
_onNativeResponse: [Function (anonymous)],
_currentRequest: ClientRequest {
_events: [Object: null prototype],
_eventsCount: 7,
_maxListeners: undefined,
outputData: [],
outputSize: 0,
writable: true,
destroyed: false,
_last: true,
chunkedEncoding: false,
shouldKeepAlive: false,
maxRequestsOnConnectionReached: false,
_defaultKeepAlive: true,
useChunkedEncodingByDefault: false,
sendDate: false,
_removedConnection: false,
_removedContLen: false,
_removedTE: false,
_contentLength: 0,
_hasBody: true,
_trailer: '',
finished: true,
_headerSent: true,
_closed: false,
socket: [Socket],
_header: 'GET /category/get/menu?menuId=1 HTTP/1.1\r\n' +
'Accept: application/json, text/plain, */*\r\n' +
'Content-Type: application/json\r\n' +
'Access-Control-Allow-Origin: *\r\n' +
'User-Agent: axios/0.26.1\r\n' +
'Host: localhost:8080\r\n' +
'Connection: close\r\n' +
'\r\n',
_keepAliveTimeout: 0,
_onPendingData: [Function: nop],
agent: [Agent],
socketPath: undefined,
method: 'GET',
maxHeaderSize: undefined,
insecureHTTPParser: undefined,
path: '/category/get/menu?menuId=1',
_ended: false,
res: null,
aborted: false,
timeoutCb: null,
upgradeOrConnect: false,
parser: null,
maxHeadersCount: null,
reusedSocket: false,
host: 'localhost',
protocol: 'http:',
_redirectable: [Circular *1],
[Symbol(kCapture)]: false,
[Symbol(kNeedDrain)]: false,
[Symbol(corked)]: 0,
[Symbol(kOutHeaders)]: [Object: null prototype]
},
_currentUrl: 'http://localhost:8080/category/get/menu?menuId=1',
[Symbol(kCapture)]: false
},
response: undefined,
isAxiosError: true,
toJSON: [Function: toJSON]
}
1
undefined
Dockerfile
FROM node:12
ENV PORT 3000
# Create app directory
RUN mkdir -p /usr/src/app
WORKDIR /usr/src/app
# Installing dependencies
COPY package*.json /usr/src/app/
RUN npm install
# Copying source files
COPY . /usr/src/app
# Building app
RUN npm run build
EXPOSE 3000
# Running the app
CMD "npm" "run" "dev"
docker-compose.yml
version: "3"
services:
frontend:
container_name: frontend
build:
context: ./frontend
dockerfile: Dockerfile
ports:
- "3000:3000"
volumes:
- ./frontend:/usr/src/app
- /usr/src/app/node_modules
- /usr/src/app/.next
networks:
- network-tier
backend:
container_name: backend
build: ./backend
depends_on:
- mysqldb
ports:
- 8080:8080
environment:
spring.datasource.url: "jdbc:mysql://mysqldb:3306/test_db?useSSL=false&useLegacyDatetimeCode=false&allowPublicKeyRetrieval=true&serverTimezone=Asia/Seoul&characterEncoding=UTF-8&autoReconnect=true&createDatabaseIfNotExist=true"
volumes:
- ./menu-canvas:/usr/src/backend
networks:
- network-tier
tty: true
mysqldb:
image: mysql:5.7
container_name: mysqldb
environment:
MYSQL_DATABASE: test_db
MYSQL_ROOT_PASSWORD: "0000"
MYSQL_ROOT_HOST: "%"
CHARACTER_SET_SERVER: utf8
command:
[
"--character-set-server=utf8mb4",
"--collation-server=utf8mb4_unicode_ci",
]
volumes:
- ./menu-canvas:/usr/src/db
ports:
- "3306:3306"
networks:
- network-tier
platform: linux/amd64
networks:
network-tier:
external: true
volumes:
menu-canvas:
next.config.js
/** @type {import('next').NextConfig} */
const nextConfig = {
  images: {
    domains: ["localhost", "*"],
  },
  reactStrictMode: true,
};
module.exports = nextConfig;
package.json
{
  "name": "frontend",
  "private": true,
  "scripts": {
    "dev": "next dev",
    "build": "next build",
    "start": "next start",
    "lint": "next lint"
  },
  "dependencies": {
    ...
  },
  "devDependencies": {
    ...
  },
  "proxy": {
    "*": {
      "target": "http://localhost:8080"
    }
  }
}
I've been stuck on this all day and can't figure out what's wrong. Any help would be appreciated!

When getServerSideProps runs, the request is made on the server, so the communication is container to container. For those requests, a localhost URL will not work; it needs to point at the backend service by name, e.g. http://backend:8080 (when calling from the frontend service to the backend service).
localhost works as expected for client-side requests, assuming you expose the ports as you do here.
When you run everything without Docker it works because all services really are on localhost. When run via docker-compose they no longer are.
If you really want to keep using localhost instead of the service name, you may be able to use host networking: https://docs.docker.com/network/host/
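As an illustration of the service-name approach, getServerSideProps could read the backend host from an environment variable so the same code works both locally and inside the container. This is only a sketch under those assumptions; BACKEND_URL is a hypothetical variable name, not something from your setup:
// Sketch only, same imports/types as in the question (axios, GetServerSideProps, CategoryType).
// BACKEND_URL is a hypothetical env var, e.g. http://backend:8080 inside docker-compose
// and http://localhost:8080 (or unset) when running `npm run dev` on the host.
export const getServerSideProps: GetServerSideProps = async (context) => {
  const baseURL = process.env.BACKEND_URL || "http://localhost:8080";
  const { menuId } = context.query;
  try {
    const response = await axios.get<CategoryType[]>(`${baseURL}/category/get/menu`, {
      params: { menuId },
    });
    return { props: { CategoryData: response.data } };
  } catch (err) {
    console.log(err);
    return { props: {} };
  }
};
You would then set BACKEND_URL on the frontend service in docker-compose (for example to http://backend:8080), while client-side code keeps using the host-mapped port.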

Related

NightwatchJs is ignoring chrome options when trying to launch no sandbox and headless on docker

I am having an issue running Nightwatch in a Docker container using Chrome: Nightwatch seems to be completely ignoring the fact that I have told it to use --no-sandbox and --headless.
My config for nightwatch.conf.js:
let localConfig = require('./nightwatch.local.conf');
module.exports = {
  src_folders: ["tests"],
  screenshots: {
    enabled: true,
    path: "screenshots"
  },
  webdriver: {
    start_process: true,
    server_path: 'node_modules/.bin/chromedriver',
    port: 9195
  },
  test_settings: {
    twlocal: localConfig,
    twint: {
      launch_url: "https://int.ecample.co.uk",
      desiredCapabilities: {
        browserName: "chrome",
        acceptSslCerts: true,
        acceptInsecureCerts: true
      }
    },
    twstaging: {
      launch_url: "https://staging.example.co.uk",
      desiredCapabilities: {
        browserName: "chrome",
        acceptSslCerts: true,
        acceptInsecureCerts: true,
        chromeOptions: {
          args: ['no-sandbox', 'headless', 'disable-gpu']
        }
      }
    },
    twlive: {
      launch_url: "https://example.co.uk",
      desiredCapabilities: {
        browserName: "chrome",
        acceptSslCerts: true
      }
    }
  }
};
In the twstaging config I have set the chromeOptions arguments, but the command it actually runs in the container is:
/usr/bin/google-chrome --allow-pre-commit-input --disable-background-networking --disable-client-side-phishing-detection --disable-default-apps --disable-hang-monitor --disable-popup-blocking --disable-prompt-on-repost --disable-sync --enable-automation --enable-blink-features=ShadowDOMV0 --enable-logging=stderr --ignore-certificate-errors --log-level=0 --no-first-run --no-service-autorun --password-store=basic --remote-debugging-port=0 --test-type=webdriver --use-mock-keychain --user-data-dir=/tmp/.com.google.Chrome.Zkxwkv
Which is producing this error:
Running as root without --no-sandbox is not supported.
I'm not sure why it's ignoring the arguments. I'm just running: nightwatch --env twstaging
Have you tried adding the goog prefix to chromeOptions? I've copied your whole config; the only change is in twstaging. Note the quotes around the key, which are needed because of the colon:
let localConfig = require('./nightwatch.local.conf');
module.exports = {
  src_folders: ["tests"],
  screenshots: {
    enabled: true,
    path: "screenshots"
  },
  webdriver: {
    start_process: true,
    server_path: 'node_modules/.bin/chromedriver',
    port: 9195
  },
  test_settings: {
    twlocal: localConfig,
    twint: {
      launch_url: "https://int.ecample.co.uk",
      desiredCapabilities: {
        browserName: "chrome",
        acceptSslCerts: true,
        acceptInsecureCerts: true
      }
    },
    twstaging: {
      launch_url: "https://staging.example.co.uk",
      desiredCapabilities: {
        browserName: "chrome",
        acceptSslCerts: true,
        acceptInsecureCerts: true,
        "goog:chromeOptions": {
          args: ['no-sandbox', 'headless', 'disable-gpu']
        }
      }
    },
    twlive: {
      launch_url: "https://example.co.uk",
      desiredCapabilities: {
        browserName: "chrome",
        acceptSslCerts: true
      }
    }
  }
};

When I make a request with axios in nuxt app, I got: getaddrinfo ENOTFOUND backend.localhost

I need your help, and thank you in advance!
I am trying to call an API Platform backend from a Nuxt app through axios, but I get a getaddrinfo ENOTFOUND backend.localhost error when making a GET request.
My configuration is:
API Platform is dockerized (Symfony) and used as the backend
Traefik is used as a reverse proxy
Nuxt is not dockerized and is used as the frontend
Here is what my docker-compose looks like:
version: "3.8"
services:
backend:
build:
context: ./
dockerfile: ./infra/docker/backend/Dockerfile.dev
user: "${PUID}:${PGID}"
restart: unless-stopped
environment:
DATABASE_CLIENT: ${DATABASE_CLIENT}
DATABASE_NAME: ${DATABASE_NAME}
DATABASE_HOST: ${DATABASE_HOST}
DATABASE_PORT: ${DATABASE_PORT}
DATABASE_USERNAME: ${DATABASE_USERNAME}
DATABASE_PASSWORD: ${DATABASE_PASSWORD}
expose:
- 80
depends_on:
- mysql
volumes:
- ./backend:/app:rw,cached
labels:
- "traefik.enable=true"
- "traefik.http.routers.backend.rule=Host(`backend.localhost`)"
- "traefik.http.routers.backend.entrypoints=web"
sysctls:
- net.ipv4.ip_unprivileged_port_start=0
portainer:
image: portainer/portainer
restart: unless-stopped
command: --no-auth -H unix:///var/run/docker.sock
expose:
- 9000
volumes:
- /var/run/docker.sock:/var/run/docker.sock
labels:
- "traefik.enable=true"
- "traefik.http.routers.portainer.rule=Host(`portainer.localhost`)"
- "traefik.http.routers.portainer.entrypoints=web"
traefik:
image: "traefik:v2.2.5"
command:
- "--log.level=DEBUG"
- "--api.insecure=true"
# Enabling docker provider
- "--providers.docker=true"
# Do not expose containers unless explicitly told so
- "--providers.docker.exposedbydefault=false"
- "--entrypoints.web.address=:80"
ports:
- "80:80"
- "8080:8080"
labels:
- "traefik.enable=true"
- "traefik.http.routers.traefik.entrypoints=web"
- "traefik.http.routers.traefik.rule=Host(`monitor.localhost`)"
- "traefik.http.services.traefik.loadbalancer.server.port=8080"
volumes:
- "/var/run/docker.sock:/var/run/docker.sock:ro"
My nuxt.config.js:
...
axios: {
  baseURL: 'http://backend.localhost/api'
}
...
I can access the API UI through the browser at http://backend.localhost/api and everything works well there.
But making the request from my Nuxt app code leads to the error below:
Error: getaddrinfo ENOTFOUND backend.localhost
at GetAddrInfoReqWrap.onlookup [as oncomplete] (node:dns:69:26) {
errno: -3008,
code: 'ENOTFOUND',
syscall: 'getaddrinfo',
hostname: 'backend.localhost',
config: {
url: '/users',
method: 'get',
headers: {
connection: 'keep-alive',
'cache-control': 'max-age=0',
'sec-ch-ua': '"Chromium";v="94", "Google Chrome";v="94", ";Not A Brand";v="99"',
'sec-ch-ua-mobile': '?0',
'sec-ch-ua-platform': '"Windows"',
'upgrade-insecure-requests': '1',
'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/94.0.4606.81 Safari/537.36',
'sec-fetch-site': 'same-origin',
'sec-fetch-mode': 'navigate',
'sec-fetch-dest': 'document',
referer: 'http://localhost:3000/',
'accept-encoding': 'gzip, deflate',
'accept-language': 'en-GB,en;q=0.9,fr-FR;q=0.8,fr;q=0.7,en-US;q=0.6',
cookie: 'phpMyAdmin=46260be5bb513da7351adb73a0f4af8e; io=7IL9SV9Qq5sBDsGzAAAD; pmaAuth-1=%7B%22iv%22%3A%22yjzw8rQARGbpPBqg6BiNlQ%3D%3D%22%2C%22mac%22%3A%2213692b36a201adb505c7ce76926414461029f705%22%2C%22payload%22%3A%22oYBIB7LMEU96C4T5X6RxgQ%3D%3D%22%7D; i18n_redirected=fr',
'if-none-match': '"46616-dbF9UDJNzxIn7dMVplznpaaPF6s"',
Accept: 'application/json, text/plain, */*'
},
baseURL: 'http://backend.localhost/api',
transformRequest: [ [Function: transformRequest] ],
transformResponse: [ [Function: transformResponse] ],
timeout: 0,
adapter: [Function: httpAdapter],
xsrfCookieName: 'XSRF-TOKEN',
xsrfHeaderName: 'X-XSRF-TOKEN',
maxContentLength: -1,
maxBodyLength: -1,
validateStatus: [Function: validateStatus],
transitional: {
silentJSONParsing: true,
forcedJSONParsing: true,
clarifyTimeoutError: false
},
data: undefined
},
request: <ref *1> Writable {
_writableState: WritableState {
objectMode: false,
highWaterMark: 16384,
finalCalled: false,
needDrain: false,
ending: false,
ended: false,
finished: false,
destroyed: false,
decodeStrings: true,
defaultEncoding: 'utf8',
length: 0,
writing: false,
corked: 0,
sync: true,
bufferProcessing: false,
onwrite: [Function: bound onwrite],
writecb: null,
writelen: 0,
afterWriteTickInfo: null,
buffered: [],
bufferedIndex: 0,
allBuffers: true,
allNoop: true,
pendingcb: 0,
constructed: true,
prefinished: false,
errorEmitted: false,
emitClose: true,
autoDestroy: true,
errored: null,
closed: false,
closeEmitted: false,
[Symbol(kOnFinished)]: []
},
_events: [Object: null prototype] {
response: [Function: handleResponse],
error: [Function: handleRequestError]
},
_eventsCount: 2,
_maxListeners: undefined,
_options: {
maxRedirects: 21,
maxBodyLength: 10485760,
protocol: 'http:',
path: '/api/users',
method: 'GET',
headers: [Object],
agent: undefined,
agents: [Object],
auth: undefined,
hostname: 'backend.localhost',
port: null,
nativeProtocols: [Object],
pathname: '/api/users'
},
_ended: true,
_ending: true,
_redirectCount: 0,
_redirects: [],
_requestBodyLength: 0,
_requestBodyBuffers: [],
_onNativeResponse: [Function (anonymous)],
_currentRequest: ClientRequest {
_events: [Object: null prototype],
_eventsCount: 7,
_maxListeners: undefined,
outputData: [],
outputSize: 0,
writable: true,
destroyed: false,
_last: true,
chunkedEncoding: false,
shouldKeepAlive: true,
_defaultKeepAlive: true,
useChunkedEncodingByDefault: false,
sendDate: false,
_removedConnection: false,
_removedContLen: false,
_removedTE: false,
_contentLength: 0,
_hasBody: true,
_trailer: '',
finished: true,
_headerSent: true,
_closed: false,
socket: [Socket],
_header: 'GET /api/users HTTP/1.1\r\n' +
'connection: keep-alive\r\n' +
'cache-control: max-age=0\r\n' +
'sec-ch-ua: "Chromium";v="94", "Google Chrome";v="94", ";Not A Brand";v="99"\r\n' +
'sec-ch-ua-mobile: ?0\r\n' +
'sec-ch-ua-platform: "Windows"\r\n' +
'upgrade-insecure-requests: 1\r\n' +
'user-agent: Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/94.0.4606.81 Safari/537.36\r\n' +
'sec-fetch-site: same-origin\r\n' +
'sec-fetch-mode: navigate\r\n' +
'sec-fetch-dest: document\r\n' +
'referer: http://localhost:3000/\r\n' +
'accept-encoding: gzip, deflate\r\n' +
'accept-language: en-GB,en;q=0.9,fr-FR;q=0.8,fr;q=0.7,en-US;q=0.6\r\n' +
'cookie: phpMyAdmin=46260be5bb513da7351adb73a0f4af8e; io=7IL9SV9Qq5sBDsGzAAAD; pmaAuth-1=%7B%22iv%22%3A%22yjzw8rQARGbpPBqg6BiNlQ%3D%3D%22%2C%22mac%22%3A%2213692b36a201adb505c7ce76926414461029f705%22%2C%22payload%22%3A%22oYBIB7LMEU96C4T5X6RxgQ%3D%3D%22%7D; i18n_redirected=fr\r\n' +
'if-none-match: "46616-dbF9UDJNzxIn7dMVplznpaaPF6s"\r\n' +
'Accept: application/json, text/plain, */*\r\n' +
'Host: backend.localhost\r\n' +
'\r\n',
_keepAliveTimeout: 0,
_onPendingData: [Object],
agent: [Agent],
socketPath: undefined,
method: 'GET',
maxHeaderSize: undefined,
insecureHTTPParser: undefined,
path: '/api/users',
_ended: false,
res: null,
aborted: false,
timeoutCb: null,
upgradeOrConnect: false,
parser: null,
maxHeadersCount: null,
reusedSocket: false,
host: 'backend.localhost',
protocol: 'http:',
_redirectable: [Circular *1],
[Symbol(kCapture)]: false,
[Symbol(kNeedDrain)]: false,
[Symbol(corked)]: 0,
[Symbol(kOutHeaders)]: [Object: null prototype]
},
_currentUrl: 'http://backend.localhost/api/users',
[Symbol(kCapture)]: false
},
response: undefined,
isAxiosError: true,
toJSON: [Function: toJSON]
}
Here is the way I made the request:
async fetch() {
  ...
  await this.$axios.$get('/users')
  ...
}
I found the root cause of the issue.
I am using Windows 10 Family edition. That Windows version requires installing Windows Subsystem for Linux (WSL) plus Docker Desktop in order to use Docker.
Using Docker with WSL makes backend.localhost unresolvable, because Docker is isolated inside WSL and communicates with my host only through its own IPv4 address, which you can find by opening the WSL terminal, typing ipconfig.exe and locating the IPv4 address of the vEthernet (WSL) adapter.
Replace backend.localhost with that IP and expose your backend service on port 81 (i.e. use ipV4:81), and everything works.
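If you go that route, one way to avoid hard-coding the address is to read it from an environment variable in nuxt.config.js. This is just a sketch; API_BASE_URL is a made-up variable name, and the fallback is the original value from the question:
// nuxt.config.js - sketch only; API_BASE_URL is a hypothetical env var,
// e.g. API_BASE_URL=http://<WSL vEthernet IPv4>:81 as described above.
export default {
  axios: {
    baseURL: process.env.API_BASE_URL || 'http://backend.localhost/api'
  }
}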

Unable to port forward with the port range syntax in Docker SDK

I am trying to run a Docker container using the Go SDK. From the terminal, I can run the following command with no issues:
docker run -d --memory 1024M --name "cdb1" -p 2001-2006:8091-8096 -p 11210-11211:11210-11211 couchbase
I want to achieve the same thing using the Docker SDK for Go but cannot find how to reproduce the -p 2001-2006:8091-8096 part. Here is my ContainerCreate call:
cont, err := cli.ContainerCreate(
    context.Background(),
    &container.Config{
        Image: "couchbase",
        ExposedPorts: nat.PortSet{
            "2001-2006":   struct{}{},
            "11210-11211": struct{}{},
        },
    },
    &container.HostConfig{
        PortBindings: nat.PortMap{
            "8091-8096": []nat.PortBinding{
                {
                    HostIP:   "0.0.0.0",
                    HostPort: "2001-2006",
                },
            },
            "11210-11211": []nat.PortBinding{
                {
                    HostIP:   "0.0.0.0",
                    HostPort: "11210-11211",
                },
            },
        },
        Resources: container.Resources{
            Memory: 1024 * 1000000,
        },
    },
    nil,
    "cdb1",
)
But running this always throws the same error:
Error response from daemon: invalid port specification: "8091-8096"
Doing some more testing, the error seems to come specifically from the PortBindings part (if I remove it and leave only the exposed ports, it works fine).
I couldn't find anything about this in the Docker documentation.
nat.PortSet is a map with nat.Port being its key type:
type PortSet map[Port]struct{}
Your "port" specification of "2001-2006" syntactically works because it's an untyped string constant and can be converted to nat.Port which has string as its underlying type, but this string literal lacks the protocol specification (e.g. tcp or udp).
Instead use the nat.NewPort() function to create the key:
ports1, err := nat.NewPort("tcp", "2001-2006")   // check err
ports2, err := nat.NewPort("tcp", "11210-11211") // check err

ExposedPorts: nat.PortSet{
    ports1: struct{}{},
    ports2: struct{}{},
},
Note that the expected "raw" string format is "2001-2006/tcp" which would also be accepted, but it's better to leave this internal detail to nat.NewPort().
And to construct a nat.PortMap, use the nat.ParsePortSpec() utility function. This is how you can assemble your PortBindings:
portBindings := nat.PortMap{}
for _, rawMapping := range []string{
    "0.0.0.0:2001-2006:8091-8096",
    "0.0.0.0:11210-11211:11210-11211",
} {
    mappings, err := nat.ParsePortSpec(rawMapping)
    if err != nil {
        panic(err)
    }
    for _, pm := range mappings {
        portBindings[pm.Port] = []nat.PortBinding{pm.Binding}
    }
}
Then you can use the portBindings above for the HostConfig.PortBindings field:
&container.HostConfig{
    PortBindings: portBindings,
    Resources: container.Resources{
        Memory: 1024 * 1000000,
    },
},
Try adding each port individually instead of using the port range:
cont, err := cli.ContainerCreate(
    context.Background(),
    &container.Config{
        Image: "couchbase",
        ExposedPorts: nat.PortSet{
            "2001":  struct{}{},
            "2002":  struct{}{},
            "2003":  struct{}{},
            "2004":  struct{}{},
            "2005":  struct{}{},
            "2006":  struct{}{},
            "11210": struct{}{},
            "11211": struct{}{},
        },
    },
    &container.HostConfig{
        PortBindings: nat.PortMap{
            "8091": []nat.PortBinding{
                {
                    HostIP:   "0.0.0.0",
                    HostPort: "2001",
                },
            },
            "8092": []nat.PortBinding{
                {
                    HostIP:   "0.0.0.0",
                    HostPort: "2002",
                },
            },
            "8093": []nat.PortBinding{
                {
                    HostIP:   "0.0.0.0",
                    HostPort: "2003",
                },
            },
            "8094": []nat.PortBinding{
                {
                    HostIP:   "0.0.0.0",
                    HostPort: "2004",
                },
            },
            "8095": []nat.PortBinding{
                {
                    HostIP:   "0.0.0.0",
                    HostPort: "2005",
                },
            },
            "8096": []nat.PortBinding{
                {
                    HostIP:   "0.0.0.0",
                    HostPort: "2006",
                },
            },
            "11210": []nat.PortBinding{
                {
                    HostIP:   "0.0.0.0",
                    HostPort: "11210",
                },
            },
            "11211": []nat.PortBinding{
                {
                    HostIP:   "0.0.0.0",
                    HostPort: "11211",
                },
            },
        },
        Resources: container.Resources{
            Memory: 1024 * 1000000,
        },
    },
    nil,
    "cdb1",
)

Error Status 400 When Uploading to Youtube API v3

I keep getting an error when trying to POST to the YouTube v3 API.
I'm trying to get a resumable upload URI back so I can upload a YouTube video.
This is the documentation I'm referencing: https://developers.google.com/youtube/v3/docs/videos/insert#go
Does anyone know what I'm doing wrong? The error log is below.
My code:
axios({
  method: 'POST',
  baseURL: 'https://www.googleapis.com',
  url: '/upload/youtube/v3/videos',
  headers: {
    'Accept': 'application/json',
    'Content-Type': 'application/json',
    'Content-Length': 167,
    'X-Upload-Content-Length': 302080,
    'X-Upload-Content-Type': 'video/mp4',
    'Authorization': `Bearer <MY_ACCESS_TOKEN>`, // has my actual access_token
  },
  params: {
    'uploadType': 'resumable',
    'key': <MY_API_KEY>, // has my actual app API key
    'part': 'snippet,status'
  },
  data: {
    'snippet': {
      'title': 'Test Upload 1',
      'description': 'Test Description 1',
      'tags': ['tag1', 'tag2'],
    },
    'status': {
      'privacyStatus': 'private',
    }
  }
})
  .then(response => {
    res.json(response.data);
  })
  .catch(err => console.log(err));
Error Log:
Error: Request failed with status code 400
at createError (/mnt/e/Dev/20200316_youtube_api/node_modules/axios/lib/core/createError.js:16:15)
at settle (/mnt/e/Dev/20200316_youtube_api/node_modules/axios/lib/core/settle.js:17:12)
at IncomingMessage.handleStreamEnd (/mnt/e/Dev/20200316_youtube_api/node_modules/axios/lib/adapters/http.js:236:11)
at IncomingMessage.emit (events.js:323:22)
at endReadableNT (_stream_readable.js:1204:12)
at processTicksAndRejections (internal/process/task_queues.js:84:21) {
config: {
url: '/upload/youtube/v3/videos',
method: 'post',
params: {
key: '<MY_API_KEY>',
part: 'snippet,status'
},
data: '{"snippet":{"title":"Test Upload 1","description":"Test Description 1","tags":["tag1","tag2"]},"status":{"privacyStatus":"private"}}',
headers: {
Accept: 'application/json',
'Content-Type': 'application/json',
'Content-Length': 132,
'X-Upload-Content-Length': 302080,
'X-Upload-Content-Type': 'video/mp4',
Authorization: 'Bearer <MY_ACCESS_TOKEN>',
'User-Agent': 'axios/0.19.2'
},
baseURL: 'https://www.googleapis.com',
transformRequest: [ [Function: transformRequest] ],
transformResponse: [ [Function: transformResponse] ],
timeout: 0,
adapter: [Function: httpAdapter],
xsrfCookieName: 'XSRF-TOKEN',
xsrfHeaderName: 'X-XSRF-TOKEN',
maxContentLength: -1,
validateStatus: [Function: validateStatus]
},
request: ClientRequest {
_events: [Object: null prototype] {
socket: [Function],
abort: [Function],
aborted: [Function],
error: [Function],
timeout: [Function],
prefinish: [Function: requestOnPrefinish]
},
_eventsCount: 6,
_maxListeners: undefined,
outputData: [],
outputSize: 0,
writable: true,
_last: true,
chunkedEncoding: false,
shouldKeepAlive: false,
useChunkedEncodingByDefault: true,
sendDate: false,
_removedConnection: false,
_removedContLen: false,
_removedTE: false,
_contentLength: null,
_hasBody: true,
_trailer: '',
finished: true,
_headerSent: true,
socket: TLSSocket {
_tlsOptions: [Object],
_secureEstablished: true,
_securePending: false,
_newSessionPending: false,
_controlReleased: true,
_SNICallback: null,
servername: 'www.googleapis.com',
alpnProtocol: false,
authorized: true,
authorizationError: null,
encrypted: true,
_events: [Object: null prototype],
_eventsCount: 9,
connecting: false,
_hadError: false,
_parent: null,
_host: 'www.googleapis.com',
_readableState: [ReadableState],
readable: true,
_maxListeners: undefined,
_writableState: [WritableState],
writable: false,
allowHalfOpen: false,
_sockname: null,
_pendingData: null,
_pendingEncoding: '',
server: undefined,
_server: null,
ssl: [TLSWrap],
_requestCert: true,
_rejectUnauthorized: true,
parser: null,
_httpMessage: [Circular],
[Symbol(res)]: [TLSWrap],
[Symbol(asyncId)]: 6,
[Symbol(kHandle)]: [TLSWrap],
[Symbol(lastWriteQueueSize)]: 0,
[Symbol(timeout)]: null,
[Symbol(kBuffer)]: null,
[Symbol(kBufferCb)]: null,
[Symbol(kBufferGen)]: null,
[Symbol(kCapture)]: false,
[Symbol(kBytesRead)]: 0,
[Symbol(kBytesWritten)]: 0,
[Symbol(connect-options)]: [Object]
},
connection: TLSSocket {
_tlsOptions: [Object],
_secureEstablished: true,
_securePending: false,
_newSessionPending: false,
_controlReleased: true,
_SNICallback: null,
servername: 'www.googleapis.com',
alpnProtocol: false,
authorized: true,
authorizationError: null,
encrypted: true,
_events: [Object: null prototype],
_eventsCount: 9,
connecting: false,
_hadError: false,
_parent: null,
_host: 'www.googleapis.com',
_readableState: [ReadableState],
readable: true,
_maxListeners: undefined,
_writableState: [WritableState],
writable: false,
allowHalfOpen: false,
_sockname: null,
_pendingData: null,
_pendingEncoding: '',
server: undefined,
_server: null,
ssl: [TLSWrap],
_requestCert: true,
_rejectUnauthorized: true,
parser: null,
_httpMessage: [Circular],
[Symbol(res)]: [TLSWrap],
[Symbol(asyncId)]: 6,
[Symbol(kHandle)]: [TLSWrap],
[Symbol(lastWriteQueueSize)]: 0,
[Symbol(timeout)]: null,
[Symbol(kBuffer)]: null,
[Symbol(kBufferCb)]: null,
[Symbol(kBufferGen)]: null,
[Symbol(kCapture)]: false,
[Symbol(kBytesRead)]: 0,
[Symbol(kBytesWritten)]: 0,
[Symbol(connect-options)]: [Object]
},
_header: 'POST /upload/youtube/v3/videos?key=<MY_API_KEY>&part=snippet,status HTTP/1.1\r\n' +
'Accept: application/json\r\n' +
'Content-Type: application/json\r\n' +
'Content-Length: 132\r\n' +
'X-Upload-Content-Length: 302080\r\n' +
'X-Upload-Content-Type: video/mp4\r\n' +
'Authorization: Bearer <MY_ACCESS_TOKEN>\r\n' +
'User-Agent: axios/0.19.2\r\n' +
'Host: www.googleapis.com\r\n' +
'Connection: close\r\n' +
'\r\n',
_onPendingData: [Function: noopPendingOutput],
agent: Agent {
_events: [Object: null prototype],
_eventsCount: 2,
_maxListeners: undefined,
defaultPort: 443,
protocol: 'https:',
options: [Object],
requests: {},
sockets: [Object],
freeSockets: {},
keepAliveMsecs: 1000,
keepAlive: false,
maxSockets: Infinity,
maxFreeSockets: 256,
maxCachedSessions: 100,
_sessionCache: [Object],
[Symbol(kCapture)]: false
},
socketPath: undefined,
method: 'POST',
insecureHTTPParser: undefined,
path: '/upload/youtube/v3/videos?key=<MY_API_KEY>&part=snippet,status',
_ended: true,
res: IncomingMessage {
_readableState: [ReadableState],
readable: false,
_events: [Object: null prototype],
_eventsCount: 3,
_maxListeners: undefined,
socket: [TLSSocket],
connection: [TLSSocket],
httpVersionMajor: 1,
httpVersionMinor: 1,
httpVersion: '1.1',
complete: true,
headers: [Object],
rawHeaders: [Array],
trailers: {},
rawTrailers: [],
aborted: false,
upgrade: false,
url: '',
method: null,
statusCode: 400,
statusMessage: 'Bad Request',
client: [TLSSocket],
_consuming: false,
_dumped: false,
req: [Circular],
responseUrl: 'https://www.googleapis.com/upload/youtube/v3/videos?key=<MY_API_KEY>&part=snippet,status',
redirects: [],
[Symbol(kCapture)]: false
},
aborted: false,
timeoutCb: null,
upgradeOrConnect: false,
parser: null,
maxHeadersCount: null,
reusedSocket: false,
_redirectable: Writable {
_writableState: [WritableState],
writable: true,
_events: [Object: null prototype],
_eventsCount: 2,
_maxListeners: undefined,
_options: [Object],
_redirectCount: 0,
_redirects: [],
_requestBodyLength: 132,
_requestBodyBuffers: [],
_onNativeResponse: [Function],
_currentRequest: [Circular],
_currentUrl: 'https://www.googleapis.com/upload/youtube/v3/videos?key=<MY_API_KEY>&part=snippet,status',
[Symbol(kCapture)]: false
},
[Symbol(kCapture)]: false,
[Symbol(kNeedDrain)]: false,
[Symbol(corked)]: 0,
[Symbol(kOutHeaders)]: [Object: null prototype] {
accept: [Array],
'content-type': [Array],
'content-length': [Array],
'x-upload-content-length': [Array],
'x-upload-content-type': [Array],
authorization: [Array],
'user-agent': [Array],
host: [Array]
}
},
response: {
status: 400,
statusText: 'Bad Request',
headers: {
'x-guploader-uploadid': '<REPONSE_ID_THAT_I_REMOVED>',
vary: 'Origin, X-Origin',
'content-type': 'application/json; charset=UTF-8',
'content-length': '353',
date: 'Sun, 07 Jun 2020 22:45:50 GMT',
server: 'UploadServer',
'alt-svc': 'h3-27=":443"; ma=2592000,h3-25=":443"; ma=2592000,h3-T050=":443"; ma=2592000,h3-Q050=":443"; ma=2592000,h3-Q049=":443";
ma=2592000,h3-Q048=":443"; ma=2592000,h3-Q046=":443"; ma=2592000,h3-Q043=":443"; ma=2592000,quic=":443"; ma=2592000; v="46,43"',
connection: 'close'
},
config: {
url: '/upload/youtube/v3/videos',
method: 'post',
params: [Object],
data: '{"snippet":{"title":"Test Upload 1","description":"Test Description 1","tags":["tag1","tag2"]},"status":{"privacyStatus":"private"}}',
headers: [Object],
baseURL: 'https://www.googleapis.com',
transformRequest: [Array],
transformResponse: [Array],
timeout: 0,
adapter: [Function: httpAdapter],
xsrfCookieName: 'XSRF-TOKEN',
xsrfHeaderName: 'X-XSRF-TOKEN',
maxContentLength: -1,
validateStatus: [Function: validateStatus]
},
request: ClientRequest {
_events: [Object: null prototype],
_eventsCount: 6,
_maxListeners: undefined,
outputData: [],
outputSize: 0,
writable: true,
_last: true,
chunkedEncoding: false,
shouldKeepAlive: false,
useChunkedEncodingByDefault: true,
sendDate: false,
_removedConnection: false,
_removedContLen: false,
_removedTE: false,
_contentLength: null,
_hasBody: true,
_trailer: '',
finished: true,
_headerSent: true,
socket: [TLSSocket],
connection: [TLSSocket],
_header: 'POST /upload/youtube/v3/videos?key=<MY_API_KEY>&part=snippet,status HTTP/1.1\r\n' +
'Accept: application/json\r\n' +
'Content-Type: application/json\r\n' +
'Content-Length: 132\r\n' +
'X-Upload-Content-Length: 302080\r\n' +
'X-Upload-Content-Type: video/mp4\r\n' +
'Authorization: Bearer <MY_ACCESS_TOKEN>\r\n' +
'User-Agent: axios/0.19.2\r\n' +
'Host: www.googleapis.com\r\n' +
'Connection: close\r\n' +
'\r\n',
_onPendingData: [Function: noopPendingOutput],
agent: [Agent],
socketPath: undefined,
method: 'POST',
insecureHTTPParser: undefined,
path: '/upload/youtube/v3/videos?key=<MY_API_KEY>&part=snippet,status',
_ended: true,
res: [IncomingMessage],
aborted: false,
timeoutCb: null,
upgradeOrConnect: false,
parser: null,
maxHeadersCount: null,
reusedSocket: false,
_redirectable: [Writable],
[Symbol(kCapture)]: false,
[Symbol(kNeedDrain)]: false,
[Symbol(corked)]: 0,
[Symbol(kOutHeaders)]: [Object: null prototype]
},
data: { error: [Object] }
},
isAxiosError: true,
toJSON: [Function]
}
That request does not upload the video itself; it returns a URL to which the video can then be uploaded.
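In other words, the resumable upload is a two-step flow: the POST above only initializes the upload session, and the video bytes are then sent to the upload URL that comes back from it (typically in the Location response header). Below is a rough sketch of the second step with axios, assuming the initial POST succeeded and its resolved response is available as initResponse; the file path is made up:
// Sketch only: `initResponse` is assumed to be the resolved response of the POST above.
const fs = require('fs');

const uploadUrl = initResponse.headers.location; // upload URL returned by the init request
const video = fs.readFileSync('./video.mp4');    // hypothetical local file

axios.put(uploadUrl, video, {
  headers: {
    'Content-Type': 'video/mp4',
    'Content-Length': video.length,
  },
})
  .then(uploadResponse => console.log(uploadResponse.data)) // metadata of the uploaded video
  .catch(err => console.log(err));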

Elasticsearch can't find hunspell

I'm running elasticsearch from a docker-compose.yml file:
version: '3'
services:
  mongo:
    image: mongo
    container_name: mongo-cust-mycom
    ports:
      - 27017:27017
    volumes:
      - cust-mycom-mongo:/data/db
    networks:
      - cust-mycom
  elasticsearch:
    image: docker.elastic.co/elasticsearch/elasticsearch:6.5.4
    container_name: elasticsearch-cust-mycom
    ports:
      - 9200:9200
      - 9300:9300
    volumes:
      - cust-mycom-elastic:/usr/share/elasticsearch/data
      - ./cust/config/elasticsearch/config/hunspell:/usr/share/elasticsearch/config/hunspell
    networks:
      - cust-mycom
    environment:
      - cluster.name=i3-elasticsearch
      - xpack.security.enabled=false
      - xpack.monitoring.enabled=false
      - xpack.ml.enabled=false
      - xpack.graph.enabled=false
      - xpack.watcher.enabled=false
    restart: unless-stopped
  kibana:
    image: docker.elastic.co/kibana/kibana:6.5.4
    container_name: kibana-cust-mycom
    ports:
      - 5601:5601
    networks:
      - cust-mycom
    depends_on:
      - elasticsearch
    restart: unless-stopped
networks:
  cust-mycom:
    driver: bridge
volumes:
  cust-mycom-mongo:
  cust-mycom-elastic:
using docker-compose up -d.
When I try to create my index using the following JSON:
{
"settings": {
"number_of_shards": 3,
"number_of_replicas": 2,
"analysis": {
"filter": {
"swedish_stemmer": {
"type": "hunspell",
"language": "sv_SE"
},
"ins_pattern": {
"type": "pattern_capture",
"patterns": [
"([a-zåäö]*)(prod)"
]
},
"cust_stopwords": {
"type": "stop",
"stopwords": [ "en", "ett", "det", "den" ]
}
},
"analyzer": {
"swedish_index": {
"tokenizer": "standard",
"filter": [
"lowercase",
"ins_pattern",
"swedish_stemmer"
]
},
"swedish_query": {
"tokenizer": "standard",
"filter": [
"lowercase",
"swedish_stemmer",
"cust_stopwords"
]
}
}
}
},
"mappings": {
"default": {
"properties": {
"keywords": {
"type": "text",
"store": true,
"norms": false,
"analyzer": "swedish_index",
"search_analyzer": "swedish_query"
},
"audience": {
"type": "keyword"
},
"contentExcerpt": {
"type": "text"
},
"date": {
"type": "date",
"store": true,
"format": "dateOptionalTime"
},
"validUntil": {
"type": "date",
"store": true,
"format": "dateOptionalTime"
},
"informationType": {
"type": "text",
"store": true,
"norms": false,
"analyzer": "swedish_index",
"search_analyzer": "swedish_query"
},
"mainContentOfPage": {
"type": "text",
"store": true,
"norms": false,
"analyzer": "swedish_index",
"search_analyzer": "swedish_query",
"term_vector": "with_positions_offsets"
},
"thumbnailUrl": {
"type": "keyword",
"store": true,
"norms": false
},
"title": {
"type": "text",
"store": true,
"norms": false,
"analyzer": "swedish_index",
"search_analyzer": "swedish_query"
},
"url": {
"type": "keyword",
"store": true,
"norms": false
},
"tags": {
"type": "text",
"store": true,
"norms": false,
"analyzer": "swedish_index",
"search_analyzer": "swedish_query"
}
}
}
}
}
and the following script:
#!/bin/bash
curl -XDELETE http://localhost:9200/main
curl -XPUT -H "Content-type: application/json" -d @json/custse.index.json http://localhost:9200/main
curl -XPUT http://localhost:9200/main/_settings -H "Content-Type: application/json" -d "{
  \"index\" : {
    \"number_of_replicas\" : 0
  }
}"
I get the following error message:
{"error":{"root_cause":[{"type":"illegal_state_exception","reason":"failed to load hunspell dictionary for locale: sv_SE"}]
I've tried putting my hunspell dictionaries in /usr/share/elasticsearch/config/hunspell/, /usr/share/elasticsearch/hunspell, /etc/elasticsearch/hunspell and /etc/elasticsearch/config/hunspell. It can't find any of them.
Here are the contents of the hunspell directory:
/etc/elasticsearch$ ls hunspell
sv_SE
/etc/elasticsearch$ ls hunspell/sv_SE/
cust.dic README_sv_SE.txt sv_SE.aff sv_SE.dic
How can I make elasticsearch find the hunspell dictionaries?
I confirm that this configuration works:
version: '3.4'
services:
  elasticsearch:
    image: docker.elastic.co/elasticsearch/elasticsearch:6.8.13
    container_name: elasticsearch6
    ports:
      - "127.0.0.1:9200:9200"
    environment:
      - "ES_JAVA_OPTS=-Xms256m -Xmx256m"
    restart: always
    volumes:
      - "es_data:/usr/share/elasticsearch/data"
      - ./elasticsearch/hunspell:/usr/share/elasticsearch/config/hunspell
volumes:
  es_data:
