I'm trying to host a Snipe-IT image using Docker, following this tutorial: https://www.youtube.com/watch?v=lyIIEVT5GYs&t=1308s
Everything seems to work fine until I get to including my custom SSL certificate in my Snipe-IT image. I have been trying for more than three weeks, but nothing works and I have no idea how to do it.
I would be very grateful for your help.
My docker-compose.yml
version: '3'
services:
  snipeit:
    build:
      context: .
      dockerfile: Dockerfile.alpine
    container_name: snipeit
    ports:
      - 8118:80
    volumes:
      - logs:/var/www/html/storage/logs
    depends_on:
      - snipedb
      - redis
    env_file:
      - .env.docker
    networks:
      - snipeit-backend
  snipedb:
    image: mariadb:latest
    volumes:
      - db:/var/lib/mysql
    env_file:
      - .env.docker
    networks:
      - snipeit-backend
  redis:
    image: redis:latest
    networks:
      - snipeit-backend
  mailhog:
    image: mailhog/mailhog:latest
    ports:
      # - 1025:1025
      - 8025:8025
    networks:
      - snipeit-backend
volumes:
  db: {}
  logs: {}
networks:
  snipeit-backend: {}
My env file (.env.docker):
# --------------------------------------------
# REQUIRED: DB SETUP
# --------------------------------------------
MYSQL_DATABASE=snipeit
MYSQL_USER=snipeit
MYSQL_PASSWORD=changeme1234
MYSQL_ROOT_PASSWORD=changeme1234
# --------------------------------------------
# REQUIRED: BASIC APP SETTINGS
# --------------------------------------------
APP_ENV=production
APP_DEBUG=false
APP_KEY=MY_APP_KEY
APP_URL=http://my_ip:8118
APP_TIMEZONE='Europe/Vienna'
APP_LOCALE=en
MAX_RESULTS=500
# --------------------------------------------
# REQUIRED: UPLOADED FILE STORAGE SETTINGS
# --------------------------------------------
PRIVATE_FILESYSTEM_DISK=local
PUBLIC_FILESYSTEM_DISK=local_public
# --------------------------------------------
# REQUIRED: DATABASE SETTINGS
# --------------------------------------------
DB_CONNECTION=mysql
DB_HOST=snipedb
DB_DATABASE=snipeit
DB_USERNAME=snipeit
DB_PASSWORD=changeme1234
DB_PREFIX=null
DB_DUMP_PATH='/usr/bin'
DB_CHARSET=utf8mb4
DB_COLLATION=utf8mb4_unicode_ci
# --------------------------------------------
# OPTIONAL: SSL DATABASE SETTINGS
# --------------------------------------------
DB_SSL=false
DB_SSL_IS_PAAS=false
DB_SSL_KEY_PATH=null
DB_SSL_CERT_PATH=null
DB_SSL_CA_PATH=null
DB_SSL_CIPHER=null
# --------------------------------------------
# REQUIRED: OUTGOING MAIL SERVER SETTINGS
# --------------------------------------------
MAIL_DRIVER=smtp
MAIL_HOST=smtp.gmail.com
MAIL_PORT=587
MAIL_USERNAME=
MAIL_PASSWORD=
MAIL_ENCRYPTION=null
MAIL_FROM_ADDR=
MAIL_FROM_NAME='Snipe-IT'
MAIL_REPLYTO_ADDR=
MAIL_REPLYTO_NAME='Snipe-IT'
MAIL_AUTO_EMBED_METHOD='attachment'
# --------------------------------------------
# REQUIRED: IMAGE LIBRARY
# This should be gd or imagick
# --------------------------------------------
IMAGE_LIB=gd
# --------------------------------------------
# OPTIONAL: BACKUP SETTINGS
# --------------------------------------------
MAIL_BACKUP_NOTIFICATION_DRIVER=null
MAIL_BACKUP_NOTIFICATION_ADDRESS=null
BACKUP_ENV=true
# --------------------------------------------
# OPTIONAL: SESSION SETTINGS
# --------------------------------------------
SESSION_LIFETIME=12000
EXPIRE_ON_CLOSE=false
ENCRYPT=false
COOKIE_NAME=snipeit_session
COOKIE_DOMAIN=null
SECURE_COOKIES=false
API_TOKEN_EXPIRATION_YEARS=40
# --------------------------------------------
# OPTIONAL: SECURITY HEADER SETTINGS
# --------------------------------------------
APP_TRUSTED_PROXIES=192.168.1.1,10.0.0.1
ALLOW_IFRAMING=false
REFERRER_POLICY=same-origin
ENABLE_CSP=false
CORS_ALLOWED_ORIGINS=null
ENABLE_HSTS=false
# --------------------------------------------
# OPTIONAL: CACHE SETTINGS
# --------------------------------------------
CACHE_DRIVER=file
SESSION_DRIVER=file
QUEUE_DRIVER=sync
CACHE_PREFIX=snipeit
# --------------------------------------------
# OPTIONAL: REDIS SETTINGS
# --------------------------------------------
REDIS_HOST=redis
REDIS_PASSWORD=null
REDIS_PORT=6379
# --------------------------------------------
# OPTIONAL: MEMCACHED SETTINGS
# --------------------------------------------
MEMCACHED_HOST=null
MEMCACHED_PORT=null
# --------------------------------------------
# OPTIONAL: PUBLIC S3 Settings
# --------------------------------------------
PUBLIC_AWS_SECRET_ACCESS_KEY=null
PUBLIC_AWS_ACCESS_KEY_ID=null
PUBLIC_AWS_DEFAULT_REGION=null
PUBLIC_AWS_BUCKET=null
PUBLIC_AWS_URL=null
PUBLIC_AWS_BUCKET_ROOT=null
# --------------------------------------------
# OPTIONAL: PRIVATE S3 Settings
# --------------------------------------------
PRIVATE_AWS_ACCESS_KEY_ID=null
PRIVATE_AWS_SECRET_ACCESS_KEY=null
PRIVATE_AWS_DEFAULT_REGION=null
PRIVATE_AWS_BUCKET=null
PRIVATE_AWS_URL=null
PRIVATE_AWS_BUCKET_ROOT=null
# --------------------------------------------
# OPTIONAL: LOGIN THROTTLING
# --------------------------------------------
LOGIN_MAX_ATTEMPTS=5
LOGIN_LOCKOUT_DURATION=60
RESET_PASSWORD_LINK_EXPIRES=900
# --------------------------------------------
# OPTIONAL: MISC
# --------------------------------------------
APP_LOG=stderr
APP_LOG_MAX_FILES=10
APP_LOCKED=false
APP_CIPHER=AES-256-CBC
GOOGLE_MAPS_API=
LDAP_MEM_LIM=500M
LDAP_TIME_LIM=600
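One common approach (not from the tutorial, so treat this as a sketch) is to keep the snipeit container on plain HTTP and terminate TLS in a reverse proxy in front of it. The additional service below assumes your certificate and key live in ./ssl on the host and that ./nginx/snipeit.conf is a hypothetical nginx config that listens on 443 with those files and proxies to http://snipeit:80:
  nginx-proxy:
    image: nginx:latest
    ports:
      - 443:443
    volumes:
      # custom certificate and key, assumed to live in ./ssl on the host
      - ./ssl:/etc/nginx/ssl:ro
      # hypothetical site config: listen 443 ssl, point ssl_certificate and
      # ssl_certificate_key at /etc/nginx/ssl, and proxy_pass http://snipeit:80
      - ./nginx/snipeit.conf:/etc/nginx/conf.d/default.conf:ro
    depends_on:
      - snipeit
    networks:
      - snipeit-backend
With that in place, APP_URL in .env.docker would need to change to the https:// address, and the 8118:80 mapping on snipeit can stay for plain-HTTP access or be dropped.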
Related
I have a Docker setup with Rails and AnyCable. Only in production does request.params return nothing; in development this is not an issue.
When I try to make a connection to wss://api.test.to:8443/live?uid=yyy&token=xxx, the connection attempt is made, but in my connection.rb request.params always returns an empty object.
This is my fairly standard connection.rb:
module ApplicationCable
class Connection < ActionCable::Connection::Base
identified_by :current_user
def connect
self.current_user = find_verified_user
end
private
def find_verified_user
auth_header = request.params[:token]
uid_header = request.params[:uid]
logger.debug(request.params[:token]) # logs nothing
logger.debug(request.to_yaml) # logs request object with no parameters included.
begin
decoded = JsonWebToken.decode(auth_header)
rescue JWT::VerificationError, JWT::ExpiredSignature, JWT::DecodeError => error_string
logger.debug("AUTH ERROR: " + error_string.to_s)
return reject_unauthorized_connection # If decoding the JWT failed, reject auth.
end
user_decoded = User.find(decoded[:user_id])
if user_decoded.id === uid_header
return user_decoded
else
return reject_unauthorized_connection
end
end
end
end
docker-compose.yml:
version: '3.8'
services:
  app:
    image: app-backend
    build:
      context: .
      dockerfile: Dockerfile
    command: bundle exec rails s -b 0.0.0.0
    depends_on:
      - database
      - redis
    volumes:
      - .:/app:cached
      - gem_cache:/usr/local/bundle/gems:cached
    env_file: production.env
    environment:
      RAILS_ENV: production
  database:
    image: postgres:13-alpine
    ports:
      - '5432:5432'
    volumes:
      - db_data:/var/lib/postgresql/data
      - ./init.sql:/docker-entrypoint-initdb.d/init.sql:delegated
    healthcheck:
      test: pg_isready -U postgres -h 127.0.0.1
      interval: 5s
  redis:
    image: redis:alpine
    volumes:
      - redis_data:/data
    ports:
      - 6379
    healthcheck:
      test: redis-cli ping
      interval: 1s
      timeout: 3s
      retries: 30
  rpc:
    entrypoint: ["bundle", "exec", "anycable"]
    build:
      context: .
    volumes:
      - .:/app:cached
      - gem_cache:/usr/local/bundle/gems:cached
    env_file: production.env
    environment:
      RAILS_ENV: production
      ANYCABLE_REDIS_URL: redis://redis:6379/0
      ANYCABLE_RPC_HOST: 0.0.0.0:50051
      ANYCABLE_DEBUG: 0
    depends_on:
      app:
        condition: service_started
      database:
        condition: service_healthy
      redis:
        condition: service_healthy
  anycable:
    image: anycable/anycable-go:latest-alpine
    entrypoint: ["anycable-go", "-ssl_cert=/var/ssl/certbot/conf/live/api.pasta.to/fullchain.pem", "-ssl_key=/var/ssl/certbot/conf/live/api.pasta.to/privkey.pem", "--path=/live", "--log_level=debug", "--debug"]
    ports:
      - '8443:8443'
    environment:
      ANYCABLE_HOST: "0.0.0.0"
      ANYCABLE_PORT: 8443
      ANYCABLE_REDIS_URL: redis://redis:6379/0
      ANYCABLE_RPC_HOST: rpc:50051
      ANYCABLE_DEBUG: 1
    volumes:
      - ./ssl:/var/ssl
    depends_on:
      rpc:
        condition: service_started
      app:
        condition: service_started
  nginx:
    image: ymuski/nginx-quic:latest
    command: "/bin/sh -c 'while :; do sleep 6h & wait $${!}; nginx -s reload; done & nginx -g \"daemon off;\"'"
    volumes:
      - ./ssl:/var/ssl
      - ./nginx/prod/nginx.conf:/etc/nginx/nginx.conf
      - ./public:/var/web
    ports:
      - 80:80
      - 443:443
    depends_on:
      app:
        condition: service_started
volumes:
  gem_cache:
  db_data:
  redis_data:
  ssl_root:
production.rb
Rails.application.configure do
config.cache_classes = true
config.eager_load = true
config.consider_all_requests_local = true
config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?
config.active_storage.service = :local
config.action_cable.url = 'wss://api.test.to:8443/live'
config.action_cable.disable_request_forgery_protection = true
config.log_level = :debug
config.logger = Logger.new(STDOUT)
config.log_tags = [ :request_id ]
config.cache_store = :redis_cache_store, {driver: :hiredis, url: "redis://redis:6379/2"}
config.action_mailer.perform_caching = false
config.i18n.fallbacks = true
config.active_support.deprecation = :notify
config.log_formatter = ::Logger::Formatter.new
if ENV["RAILS_LOG_TO_STDOUT"].present?
logger = ActiveSupport::Logger.new(STDOUT)
logger.formatter = config.log_formatter
config.logger = ActiveSupport::TaggedLogging.new(logger)
end
config.active_record.dump_schema_after_migration = false
config.active_record.verbose_query_logs = true
end
This is what's logged when a websocket connection is made:
https://user-images.githubusercontent.com/15372551/124969065-8e8e4680-e026-11eb-912d-2aa8e98f5607.png
I was able to resolve this by upgrading to the latest version of anycable-rails (>= 1.0).
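For reference, a minimal sketch of that bump in the Gemfile (the exact version constraint is assumed; pin it to whatever the current release is):
# Gemfile
gem 'anycable-rails', '>= 1.0'
Then run bundle update anycable-rails so Gemfile.lock picks up the new version.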
I have a docker-compose.yml, as given below, with a service defined for Selenium using the selenium/standalone-chrome-debug image.
# docker-compose.yml
version: '3'
services:
  webapp:
    tty: true
    stdin_open: true
    container_name: webapp
    depends_on:
      - postgres
      - elasticsearch
      - redis
      - selenium
    build: .
    volumes:
      - .:/webapp
    ports:
      - "3000:3000"
    entrypoint: sh /webapp/setup.sh
    environment:
      - REDISTOGO_URL=redis://redis:6379
      - ELASTICSEARCH_URL=http://elasticsearch:9200
      - SELENIUM_HOST=selenium
      - SELENIUM_PORT=4444
  postgres:
    container_name: postgres
    image: postgres:9.5.17
    ports:
      - "5432:5432"
    volumes:
      - ./postgres:/var/lib/postgresql
    environment:
      - POSTGRES_PASSWORD=test
      - POSTGRES_USER=test
      - POSTGRES_DB=test
  redis:
    container_name: redis
    image: redis:5.0.5-alpine
    command: redis-server
    hostname: redis
    ports:
      - "6379:6379"
    volumes:
      - redis:/data
  sidekiq:
    build: .
    command: bundle exec sidekiq
    volumes:
      - .:/webapp
    depends_on:
      - postgres
      - redis
    environment:
      - REDISTOGO_URL=redis://redis:6379
  elasticsearch:
    image: elasticsearch:6.8.0
    container_name: elasticsearch
    ports:
      - "9200:9200"
    depends_on:
      - postgres
    volumes:
      - esdata:/usr/share/elasticsearch/data
  selenium:
    image: selenium/standalone-chrome-debug
    ports:
      - "4444:4444"
volumes:
  redis:
  postgres:
  esdata:
And rails_helper.rb
# rails_helper.rb
require 'database_cleaner'
require 'simplecov'
SimpleCov.start('rails') do
coverage_dir 'coverage'
add_group 'Modules', 'app/modules'
add_filter "lib/api_constraints.rb"
add_filter "app/uploaders/"
add_filter "app/models/redactor_rails/"
add_filter "app/controllers/application_controller.rb"
add_filter "app/models/application_record.rb"
add_filter "app/workers/"
end
# This file is copied to spec/ when you run 'rails generate rspec:install'
ENV['RAILS_ENV'] ||= 'test'
require File.expand_path('../../config/environment', __FILE__)
# Prevent database truncation if the environment is production
abort("The Rails environment is running in production mode!") if Rails.env.production?
require 'spec_helper'
require 'rspec/rails'
Dir[Rails.root.join("spec/support/**/*.rb")].each { |f| require f }
# Add additional requires below this line. Rails is not loaded until this point!
require 'capybara/rspec'
require 'net_http_ssl_fix'
require 'selenium-webdriver'
require 'webdrivers/chromedriver'
require 'spree/testing_support/capybara_ext'
require 'rack_session_access/capybara'
require 'capybara-screenshot/rspec'
require 'rspec/retry'
# Add rake task example group
require 'support/tasks'
# Force lock local timezone for test environment
ENV['TZ'] = 'UTC'
Webdrivers.cache_time = 86_400
selenium_host = "http://127.0.0.1:4444/wd/hub"
unless ENV['SELENIUM_HOST'].nil?
selenium_host = "http://#{ ENV["SELENIUM_HOST"] }:4444/wd/hub"
end
Capybara.register_driver :selenium_chrome do |app|
caps = Selenium::WebDriver::Remote::Capabilities.chrome(
browserName: 'chrome',
"chromeOptions" => {
args: ['headless','no-sandbox','disable-gpu','window-size=1920x1080']
}
)
Capybara::Selenium::Driver.new(
app,
browser: :chrome,
url: selenium_host,
desired_capabilities: caps
)
end
Capybara.server = :puma, { Silent: true }
Capybara.javascript_driver = :selenium_chrome
Capybara.save_path = "#{ Rails.root }/tmp/screenshots/"
Capybara.raise_server_errors = false
Capybara.default_max_wait_time = 10
Capybara.asset_host = 'http://localhost:3000'
Capybara.configure do |config|
config.match = :prefer_exact
config.ignore_hidden_elements = false
config.visible_text_only = true
# accept clicking of associated label for checkboxes/radio buttons (css psuedo elements)
config.automatic_label_click = true
end
Capybara.always_include_port = true
# Requires supporting ruby files with custom matchers and macros, etc, in
# spec/support/ and its subdirectories. Files matching `spec/**/*_spec.rb` are
# run as spec files by default. This means that files in spec/support that end
# in _spec.rb will both be required and run as specs, causing the specs to be
# run twice. It is recommended that you do not name files matching this glob to
# end with _spec.rb. You can configure this pattern with the --pattern
# option on the command line or in ~/.rspec, .rspec or `.rspec-local`.
#
# The following line is provided for convenience purposes. It has the downside
# of increasing the boot-up time by auto-requiring all files in the support
# directory. Alternatively, in the individual `*_spec.rb` files, manually
# require only the support files necessary.
#
Dir[Rails.root.join('spec/support/**/*.rb')].each { |f| require f }
# Checks for pending migration and applies them before tests are run.
# If you are not using ActiveRecord, you can remove this line.
ActiveRecord::Migration.maintain_test_schema!
RSpec.configure do |config|
config.expect_with :rspec do |c|
# enable both should and expect
c.syntax = [:should, :expect]
end
# If you're not using ActiveRecord, or you'd prefer not to run each of your
# examples within a transaction, remove the following line or assign false
# instead of true.
config.use_transactional_fixtures = true
config.append_after(:each) do
Capybara.reset_sessions!
end
config.include Capybara::DSL
config.order = "random"
config.use_transactional_fixtures = false
config.before(:suite) do
DatabaseCleaner.clean_with(:truncation)
# compile front end
WebpackerHelper.compile_once
# disable searchkick callbacks, now enabled by using search hook
Searchkick.disable_callbacks
end
# hook for enabling searchkick callbacks
config.around(:each, search: true) do |example|
Searchkick.callbacks(true) do
example.run
end
end
config.before(:each) do
DatabaseCleaner.strategy = Capybara.current_driver == :rack_test ? :transaction : :truncation
DatabaseCleaner.clean
DatabaseCleaner.start
DownloadHelper.clear_downloads
Factory.seed_data
end
config.after(:each) do
Capybara.app_host = nil # don't change me, explicitly set host in each spec appropriately
DatabaseCleaner.clean
Timecop.return
end
config.include DeviseHelpers
config.include Devise::Test::ControllerHelpers, type: :controller
config.include CommonHelper
config.include ImageHelper
config.include CommonSpecHelper
config.include ReactComponentHelper
config.include BraintreeHelper
config.include BookingSpecHelper
config.include CapybaraRspecExt
config.include DownloadHelper
config.include ActionView::Helpers::NumberHelper
config.include ActionView::Helpers::DateHelper
config.infer_spec_type_from_file_location!
# Filter lines from Rails gems in backtraces.
config.filter_rails_from_backtrace!
# arbitrary gems may also be filtered via:
# config.filter_gems_from_backtrace("gem name")
# Suppress Braintree noise
null_logger = Logger.new("/dev/null")
null_logger.level = Logger::INFO
Braintree::Configuration.logger = null_logger
config.after(:example, :on_fail => :screenshot) do |example|
full_screenshot if example.exception
end
config.after(:example, :on_fail => :open_page) do |example|
save_and_open_page if example.exception
end
# set parallel env for searchkick
Searchkick.index_suffix = ENV['TEST_ENV_NUMBER']
# show retry status in spec process
config.verbose_retry = true
# default number of retries
config.default_retry_count = 0
# sleep for 1 seconds before retry
config.default_sleep_interval = 1
# Retry failing specs (conditions to retry are set in config.retry_count_condition)
config.around :each do |ex|
ex.run_with_retry
end
# Retry failing JS specs or failing specs with specific exception
config.retry_count_condition = proc do |ex|
if (ex.metadata[:js] || [Net::ReadTimeout].include?(ex.exception.class)) && !ex.metadata[:on_fail]
nil # will fallback to config.default_retry_count
else
0 # no retries if conditions not matched
end
end
# callback to be run between retries
config.retry_callback = proc do |ex|
Capybara.reset!
end
end
When I run docker-compose exec webapp rspec spec/feature/test_spec.rb for a feature spec with js: true, it fails with the following stack trace:
Failure/Error: ex.run_with_retry
Selenium::WebDriver::Error::WebDriverError:
<unknown>: Failed to read the 'sessionStorage' property from 'Window': Access is denied for this document.
(Session info: chrome=75.0.3770.100)
# #0 0x5651e686d7a9 <unknown>
# /usr/local/bundle/gems/selenium-webdriver-3.142.3/lib/selenium/webdriver/remote/response.rb:72:in `assert_ok'
# /usr/local/bundle/gems/selenium-webdriver-3.142.3/lib/selenium/webdriver/remote/response.rb:34:in `initialize'
# /usr/local/bundle/gems/selenium-webdriver-3.142.3/lib/selenium/webdriver/remote/http/common.rb:88:in `new'
# /usr/local/bundle/gems/selenium-webdriver-3.142.3/lib/selenium/webdriver/remote/http/common.rb:88:in `create_response'
# /usr/local/bundle/gems/selenium-webdriver-3.142.3/lib/selenium/webdriver/remote/http/default.rb:114:in `request'
# /usr/local/bundle/gems/selenium-webdriver-3.142.3/lib/selenium/webdriver/remote/http/common.rb:64:in `call'
# /usr/local/bundle/gems/selenium-webdriver-3.142.3/lib/selenium/webdriver/remote/bridge.rb:167:in `execute'
# /usr/local/bundle/gems/selenium-webdriver-3.142.3/lib/selenium/webdriver/remote/w3c/bridge.rb:567:in `execute'
# /usr/local/bundle/gems/selenium-webdriver-3.142.3/lib/selenium/webdriver/remote/w3c/bridge.rb:305:in `execute_script'
# /usr/local/bundle/gems/selenium-webdriver-3.142.3/lib/selenium/webdriver/remote/w3c/bridge.rb:277:in `clear_session_storage'
# /usr/local/bundle/gems/selenium-webdriver-3.142.3/lib/selenium/webdriver/common/html5/session_storage.rb:40:in `clear'
# /usr/local/bundle/gems/capybara-3.20.0/lib/capybara/selenium/driver.rb:325:in `clear_session_storage'
# /usr/local/bundle/gems/capybara-3.20.0/lib/capybara/selenium/driver.rb:317:in `clear_storage'
# /usr/local/bundle/gems/capybara-3.20.0/lib/capybara/selenium/driver_specializations/chrome_driver.rb:45:in `clear_storage'
# /usr/local/bundle/gems/capybara-3.20.0/lib/capybara/selenium/driver.rb:291:in `clear_browser_state'
# /usr/local/bundle/gems/capybara-3.20.0/lib/capybara/selenium/driver.rb:446:in `reset_browser_state'
# /usr/local/bundle/gems/capybara-3.20.0/lib/capybara/selenium/driver.rb:125:in `reset!'
# /usr/local/bundle/gems/capybara-3.20.0/lib/capybara/selenium/driver_specializations/chrome_driver.rb:36:in `reset!'
# /usr/local/bundle/gems/capybara-3.20.0/lib/capybara/session.rb:128:in `reset!'
# /usr/local/bundle/gems/capybara-3.20.0/lib/capybara.rb:315:in `block in reset_sessions!'
# /usr/local/bundle/gems/capybara-3.20.0/lib/capybara.rb:315:in `reverse_each'
# /usr/local/bundle/gems/capybara-3.20.0/lib/capybara.rb:315:in `reset_sessions!'
# /usr/local/bundle/gems/capybara-3.20.0/lib/capybara/rspec.rb:18:in `block (2 levels) in <top (required)>'
# /usr/local/bundle/gems/rspec-retry-0.6.1/lib/rspec/retry.rb:123:in `block in run'
# /usr/local/bundle/gems/rspec-retry-0.6.1/lib/rspec/retry.rb:110:in `loop'
# /usr/local/bundle/gems/rspec-retry-0.6.1/lib/rspec/retry.rb:110:in `run'
# /usr/local/bundle/gems/rspec-retry-0.6.1/lib/rspec_ext/rspec_ext.rb:12:in `run_with_retry'
# ./spec/rails_helper.rb:224:in `block (2 levels) in <top (required)>'
# /usr/local/bundle/gems/rspec-retry-0.6.1/lib/rspec/retry.rb:123:in `block in run'
# /usr/local/bundle/gems/rspec-retry-0.6.1/lib/rspec/retry.rb:110:in `loop'
# /usr/local/bundle/gems/rspec-retry-0.6.1/lib/rspec/retry.rb:110:in `run'
# /usr/local/bundle/gems/rspec-retry-0.6.1/lib/rspec_ext/rspec_ext.rb:12:in `run_with_retry'
# /usr/local/bundle/gems/rspec-retry-0.6.1/lib/rspec/retry.rb:37:in `block (2 levels) in setup'
Is there anything that I am missing? What is the correct way to set up Selenium with docker-compose?
Since you're using the Selenium standalone server setup, you need to configure the Capybara Selenium driver for remote usage:
Capybara.register_driver :selenium_chrome do |app|
  options = Selenium::WebDriver::Chrome::Options.new(args: %w[
    headless no-sandbox disable-gpu window-size=1920x1080
  ])
  Capybara::Selenium::Driver.new(
    app,
    browser: :remote,
    desired_capabilities: :chrome,
    options: options,
    url: selenium_host
  )
end
Other notes:
If you're using Rails 5.1+, you can probably remove all the DatabaseCleaner stuff and just enable transactional tests (see the sketch after this list).
Setting ignore_hidden_elements = false is a terrible idea when writing tests, since you generally only want to be dealing with elements the user can actually see.
:smart is generally a better default for Capybara.match (rather than :prefer_exact) if you care about making sure your tests are referring to the elements you expect them to.
You shouldn't be including Capybara::DSL in every RSpec test type.
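A minimal sketch of that first note, assuming Rails 5.1+ with rspec-rails (config names taken from the rails_helper.rb above):
RSpec.configure do |config|
  # Rails 5.1+ shares the ActiveRecord connection between the test thread and
  # the app server Capybara boots, so transactions can replace DatabaseCleaner.
  config.use_transactional_fixtures = true
  # The DatabaseCleaner.strategy / clean / start hooks can then be removed.
end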
You need the hub image, which is missing in the above compose file. Link the hub image to the node image:
selenium-hub:
  image: selenium/hub:3.14.0
  container_name: hub
  ports:
    - "4444:4444"
node-chrome:
  image: selenium/node-chrome-debug:3.14.0
  links:
    - selenium-hub
I can't run Action Cable properly on my Ubuntu server with Docker.
It seems to be some kind of authentication error with Devise; this is my terminal log:
There was an exception - NoMethodError(undefined method `user' for nil:NilClass)
cable_1 | /var/www/acim/public/app/channels/application_cable/connection.rb:17:in `find_verified_user'
This is my /app/channels/application_cable/connection.rb:
module ApplicationCable
class Connection < ActionCable::Connection::Base
identified_by :current_user
def connect
self.current_user = find_verified_user
logger.add_tags 'ActionCable', current_user.email
end
protected
def find_verified_user
if (current_user = env['warden'].user)
#if current_user = User.find_by(id: cookies.signed[:user_id])
current_user
else
reject_unauthorized_connection
end
end
end
end
And here is config/environments/production.rb:
Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# Code is not reloaded between requests.
config.cache_classes = true
# Eager load code on boot. This eager loads most of Rails and
# your application in memory, allowing both threaded web servers
# and those relying on copy on write to perform better.
# Rake tasks automatically ignore this option for performance.
config.eager_load = true
# Full error reports are disabled and caching is turned on.
config.consider_all_requests_local = true
config.action_controller.perform_caching = true
# Ensures that a master key has been made available in either ENV["RAILS_MASTER_KEY"]
# or in config/master.key. This key is used to decrypt credentials (and other encrypted files).
# config.require_master_key = true
# Disable serving static files from the `/public` folder by default since
# Apache or NGINX already handles this.
config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?
# Compress JavaScripts and CSS.
config.assets.js_compressor = :uglifier
# config.assets.css_compressor = :sass
# Do not fallback to assets pipeline if a precompiled asset is missed.
config.assets.compile = false
# `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb
# Enable serving of images, stylesheets, and JavaScripts from an asset server.
# config.action_controller.asset_host = 'http://assets.example.com'
# Specifies the header that your server uses for sending files.
# config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX
# Store uploaded files on the local file system (see config/storage.yml for options)
config.active_storage.service = :local
# Mount Action Cable outside main process or domain
# config.action_cable.mount_path = nil
config.action_cable.url = 'ws://192.168.99.100/cable'
config.action_cable.allowed_request_origins = [ 'http://192.168.99.100', /http:\/\/192.168.99.100.*/, 'http://localhost' ]
#config.action_cable.url = [/ws:\/\/*/, /wss:\/\/*/]
#config.action_cable.allowed_request_origins = [/http:\/\/*/, /https:\/\/*/]
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
# config.force_ssl = true
# Use the lowest log level to ensure availability of diagnostic information
# when problems arise.
config.log_level = :debug
# Prepend all log lines with the following tags.
config.log_tags = [ :request_id ]
# Use a different cache store in production.
# config.cache_store = :mem_cache_store
# Use a real queuing backend for Active Job (and separate queues per environment)
# config.active_job.queue_adapter = :resque
# config.active_job.queue_name_prefix = "ACIM_#{Rails.env}"
config.action_mailer.perform_caching = false
# Ignore bad email addresses and do not raise email delivery errors.
# Set this to true and configure the email server for immediate delivery to raise delivery errors.
# config.action_mailer.raise_delivery_errors = false
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation cannot be found).
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners.
config.active_support.deprecation = :notify
# Use default logging formatter so that PID and timestamp are not suppressed.
config.log_formatter = ::Logger::Formatter.new
# Use a different logger for distributed setups.
# require 'syslog/logger'
# config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')
if ENV["RAILS_LOG_TO_STDOUT"].present?
logger = ActiveSupport::Logger.new(STDOUT)
logger.formatter = config.log_formatter
config.logger = ActiveSupport::TaggedLogging.new(logger)
end
# Do not dump schema after migrations.
config.active_record.dump_schema_after_migration = false
# Do not serve files in public/ directly from Ruby; let NGINX handle them.
config.public_file_server.enabled = false
config.assets.js_compressor = Uglifier.new(harmony: true)
end
Rails.application.config.assets.precompile += %w( *.js ^[^_]*.css *.css.erb )
Here is my docker-compose.yml file:
version: '3'
services:
db:
image: postgres:9.6
environment:
- ACIM_DATABASE_PASSWORD=ACIM_2018
volumes:
- 'db:/var/lib/postgresql/data'
env_file:
- '.env'
expose:
- '5432'
ports:
- "7000:5432"
nginx:
image: nginx:latest
container_name: production_nginx
volumes:
- ./config/nginx/nginx.conf:/etc/nginx/nginx.conf
- public-content:/var/www/acim/public
ports:
- 80:80
- 443:443
links:
- web
redis:
image: redis
command: redis-server
volumes:
- 'redis:/data'
ports:
- "6379"
sidekiq:
build: .
#command: sidekiq -C config/sidekiq.yml
command: bundle exec sidekiq
volumes:
- .:/ACIM
links:
- db
- redis
depends_on:
- db
- redis
env_file:
- '.env'
web:
build: .
# command: bundle exec rails s -p 3000 -b '0.0.0.0'
volumes:
- bundle_cache:/bundle
- public-content:/var/www/acim/public
- .:/ACIM
ports:
- "5000:3000"
depends_on:
- db
- redis
env_file:
- '.env'
cable:
depends_on:
- 'redis'
build: .
command: puma -p 28080 cable/config.ru
ports:
- '28080:28080'
volumes:
- '.:/ACIM'
env_file:
- '.env'
volumes:
bundle_cache:
redis:
db:
public-content:
Please help; this has been killing me for two days now!
Thanks in advance.
I have a working setup using the Warden cookie variant that you have commented out, plus a warden_hooks initializer to manage the creation and invalidation of the cookie.
/app/channels/application_cable/connection.rb
module ApplicationCable
class Connection < ActionCable::Connection::Base
identified_by :current_user
def connect
self.current_user = find_verified_user
end
private
def find_verified_user
if current_user = User.find_by(id: cookies.signed['user_token'])
current_user.id
else
reject_unauthorized_connection
end
end
end
end
/app/config/initializers/warden_hooks.rb
# frozen_string_literal: true
# Set user_token cookie after sign in
Warden::Manager.after_set_user do |user, auth, opts|
scope = opts[:scope]
auth.cookies.signed["#{scope}_token"] = user.id
end
# Invalidate user.id cookie on sign out
Warden::Manager.before_logout do |user, auth, opts|
scope = opts[:scope]
auth.cookies.signed["#{scope}_token"] = nil
end
Finally, I was able to get everything working after some changes, so in case it helps someone, here is what worked for me:
/app/channels/application_cable/connection.rb
module ApplicationCable
class Connection < ActionCable::Connection::Base
identified_by :current_user
def connect
# This is a websocket so we have no warden and no session here
# How to reuse the login made with devise?
# http://www.rubytutorial.io/actioncable-devise-authentication/
self.current_user = find_verified_user
logger.info("current_user: #{self.current_user.inspect}")
logger.add_tags "ActionCable", current_user.email
end
protected
def find_verified_user
verified_user = User.find_by(id: cookies.signed['user.id'])
if verified_user && cookies.signed['user.expires_at'] > Time.now
verified_user
else
reject_unauthorized_connection
end
end
end
end
/app/config/initializers/warden_hooks.rb
# http://www.rubytutorial.io/actioncable-devise-authentication/
Warden::Manager.after_set_user do |user,auth,opts|
scope = opts[:scope]
auth.cookies.signed["#{scope}.id"] = user.id
auth.cookies.signed["#{scope}.expires_at"] = 30.minutes.from_now
end
Warden::Manager.before_logout do |user, auth, opts|
scope = opts[:scope]
auth.cookies.signed["#{scope}.id"] = nil
auth.cookies.signed["#{scope}.expires_at"] = nil
end
config/environments/production.rb
Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# Code is not reloaded between requests.
config.cache_classes = true
# Eager load code on boot. This eager loads most of Rails and
# your application in memory, allowing both threaded web servers
# and those relying on copy on write to perform better.
# Rake tasks automatically ignore this option for performance.
config.eager_load = true
# Full error reports are disabled and caching is turned on.
config.consider_all_requests_local = true
config.action_controller.perform_caching = true
# Ensures that a master key has been made available in either ENV["RAILS_MASTER_KEY"]
# or in config/master.key. This key is used to decrypt credentials (and other encrypted files).
# config.require_master_key = true
# Disable serving static files from the `/public` folder by default since
# Apache or NGINX already handles this.
config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?
# Compress JavaScripts and CSS.
config.assets.js_compressor = :uglifier
# config.assets.css_compressor = :sass
# Do not fallback to assets pipeline if a precompiled asset is missed.
config.assets.compile = false
# `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb
# Enable serving of images, stylesheets, and JavaScripts from an asset server.
# config.action_controller.asset_host = 'http://assets.example.com'
# Specifies the header that your server uses for sending files.
# config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX
# Store uploaded files on the local file system (see config/storage.yml for options)
config.active_storage.service = :local
# Mount Action Cable outside main process or domain
# config.action_cable.mount_path = nil
#config.action_cable.url = 'ws://192.168.99.100/cable'
#config.action_cable.allowed_request_origins = [ 'http://192.168.99.100', /http:\/\/192.168.99.100.*/, 'http://localhost' ]
config.action_cable.url = 'ws://192.168.99.100/cable'
config.action_cable.allowed_request_origins = [/http:\/\/*/, /https:\/\/*/]
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
# config.force_ssl = true
# Use the lowest log level to ensure availability of diagnostic information
# when problems arise.
config.log_level = :debug
# Prepend all log lines with the following tags.
config.log_tags = [ :request_id ]
# Use a different cache store in production.
# config.cache_store = :mem_cache_store
# Use a real queuing backend for Active Job (and separate queues per environment)
# config.active_job.queue_adapter = :resque
# config.active_job.queue_name_prefix = "ACIM_#{Rails.env}"
config.action_mailer.perform_caching = false
# Ignore bad email addresses and do not raise email delivery errors.
# Set this to true and configure the email server for immediate delivery to raise delivery errors.
# config.action_mailer.raise_delivery_errors = false
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation cannot be found).
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners.
config.active_support.deprecation = :notify
# Use default logging formatter so that PID and timestamp are not suppressed.
config.log_formatter = ::Logger::Formatter.new
# Use a different logger for distributed setups.
# require 'syslog/logger'
# config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')
if ENV["RAILS_LOG_TO_STDOUT"].present?
logger = ActiveSupport::Logger.new(STDOUT)
logger.formatter = config.log_formatter
config.logger = ActiveSupport::TaggedLogging.new(logger)
end
# Do not dump schema after migrations.
config.active_record.dump_schema_after_migration = false
# Do not serve files in public/ directly from Ruby; let NGINX handle them.
config.public_file_server.enabled = false
config.assets.js_compressor = Uglifier.new(harmony: true)
end
Rails.application.config.assets.precompile += %w( *.js ^[^_]*.css *.css.erb )
config/initializers/sidekiq.rb
Sidekiq.configure_server do |config|
config.redis = { url: 'redis://192.168.99.100:6379' }
end
Sidekiq.configure_client do |config|
config.redis = { url: 'redis://192.168.99.100:6379' }
end
Dockerfile
FROM ruby:2.5.1
# Install dependencies
RUN apt-get update -qq && apt-get install -y build-essential libpq-dev nodejs
ENV RAILS_ROOT /var/www/acim/public
RUN mkdir -p $RAILS_ROOT
RUN mkdir -p $RAILS_ROOT/log
RUN rm -rf /var/www/acim/public/tmp/pids/server.pid
# Set working directory, where the commands will be ran:
WORKDIR $RAILS_ROOT
# Setting env up
ENV RAILS_ENV='production'
ENV RACK_ENV='production'
# Adding gems
COPY Gemfile Gemfile
COPY Gemfile.lock Gemfile.lock
RUN bundle install --jobs 20 --retry 5 --without development test
# Adding project files
COPY . .
RUN bundle exec rake assets:precompile
EXPOSE 3000
CMD ["bundle", "exec", "puma", "-C", "config/puma.rb"]
docker-compose.yml
version: '3'
services:
  db:
    image: postgres:9.6
    environment:
      - ACIM_DATABASE_PASSWORD=ACIM_2018
    volumes:
      - 'db:/var/lib/postgresql/data'
    env_file:
      - '.env'
    expose:
      - '5432'
    ports:
      - "7000:5432"
  nginx:
    image: nginx:latest
    container_name: production_nginx
    volumes:
      - ./config/nginx/nginx.conf:/etc/nginx/nginx.conf
      - public-content:/var/www/acim/public
    ports:
      - 80:80
      - 443:443
    links:
      - web
  # maildev:
  #   image: djfarrelly/maildev
  #   ports:
  #     - "2000:80"
  redis:
    image: redis
    command: redis-server
    volumes:
      - 'redis:/data'
    ports:
      - "6379:6379"
  sidekiq:
    build: .
    #command: sidekiq -C config/sidekiq.yml
    command: bundle exec sidekiq
    volumes:
      - .:/ACIM
    links:
      - db
      - redis
    depends_on:
      - db
      - redis
    env_file:
      - '.env'
  web:
    build: .
    volumes:
      - bundle_cache:/bundle
      - public-content:/var/www/acim/public
      - .:/ACIM
    ports:
      - "5000:3000"
    depends_on:
      - db
      - redis
    env_file:
      - '.env'
  cable:
    depends_on:
      - 'redis'
      - 'sidekiq'
    build: .
    command: puma -p 28080 cable/config.ru
    ports:
      - '28080:28080'
    volumes:
      - '.:/ACIM'
    env_file:
      - '.env'
volumes:
  bundle_cache:
  redis:
  db:
  public-content:
I have a strange issue with a Rails app running in a Docker container: when I run RAILS_ENV=development rake db:drop db:create, it drops and creates only the test database (and a database named 'db'), never the development one.
Rails 5.0.0
My config/database.yml is:
default: &default
  adapter: postgresql
  encoding: unicode
  pool: <%= ENV.fetch("RAILS_MAX_THREADS") { 5 } %>

development:
  <<: *default
  database: appname-api_development
  username: appname
  password: appname
  host: <%= ENV['DOCKER_DB_HOST'] || 'localhost' %>

test:
  <<: *default
  database: appname-api_test<%= ENV['TEST_ENV_NUMBER'] %>
  username: appname
  password: appname
  host: <%= ENV['DOCKER_DB_HOST'] || 'localhost' %>
and this is what I get:
root#9176b57db829:/appname# RAILS_ENV=development rake db:drop db:create
Dropped database 'db'
Dropped database 'appname-api_test'
Created database 'db'
Created database 'appname-api_test'
This is running in a Docker container, and no environment variable is set for RAILS_ENV, RACK_ENV, or anything like that.
This is my docker-compose.yml:
version: '2'
services:
  db:
    image: postgres
    restart: always
    environment:
      POSTGRES_USER: appname
      POSTGRES_PASSWORD: appname
    volumes:
      - "${HOME}/.postgres-data:/var/lib/postgresql/data"
  redis:
    image: redis:3.2
  api:
    build: .
    environment:
      DOCKER_DB_HOST: 'db'
      REDIS_PROVIDER: 'redis://redis:6379/1'
      REDIS_URL: 'redis://redis:6379/1'
      SMTP_HOST: 'localhost:3000'
    # command: bundle exec rails s -b "0.0.0.0"
    volumes:
      - "${PWD}:/appname"
    ports:
      - "3000:3000"
      - "4000:4000"
    depends_on:
      - db
      - redis
    networks:
      default:
        aliases:
          - api
And this is my config/environments/development.rb file:
# frozen_string_literal: true
Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# In the development environment your application's code is reloaded on
# every request. This slows down response time but is perfect for development
# since you don't have to restart the web server when you make code changes.
config.cache_classes = false
# Do not eager load code on boot.
config.eager_load = false
config.action_cable.url = ENV["ACTIONCABLE_URL"]
config.action_cable.allowed_request_origins = [ ENV["FRONTEND_ORIGIN"] ]
# Show full error reports.
config.consider_all_requests_local = true
# Enable/disable caching. By default caching is disabled.
if Rails.root.join("tmp/caching-dev.txt").exist?
config.action_controller.perform_caching = true
config.cache_store = :memory_store
config.public_file_server.headers = {
"Cache-Control" => "public, max-age=172800",
}
else
config.action_controller.perform_caching = false
config.cache_store = :null_store
end
# Don't care if the mailer can't send.
config.action_mailer.raise_delivery_errors = false
config.action_mailer.perform_caching = false
config.action_mailer.delivery_method = :letter_opener
# Print deprecation notices to the Rails logger.
config.active_support.deprecation = :log
# Raise an error on page load if there are pending migrations.
config.active_record.migration_error = :page_load
# Debug mode disables concatenation and preprocessing of assets.
# This option may cause significant delays in view rendering with a large
# number of complex assets.
config.assets.debug = true
# Suppress logger output for asset requests.
config.assets.quiet = true
# Raises error for missing translations
# config.action_view.raise_on_missing_translations = true
# Uses Guard and LiveReload to reload html/css changes automatically
config.middleware.insert_after ActionDispatch::Static, Rack::LiveReload
end
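One way to see what Rails actually resolves here is to print the loaded database configurations from inside the api container (a diagnostic sketch only; check_db_config.rb is a hypothetical helper, and the same lines can be pasted into rails console). If the development entry comes back with database 'db', something such as a DATABASE_URL variable is overriding config/database.yml:
# check_db_config.rb -- hypothetical diagnostic, run inside the container with:
#   docker-compose run api bundle exec rails runner check_db_config.rb
ActiveRecord::Base.configurations.each do |env, conf|
  puts "#{env}: database=#{conf['database']} host=#{conf['host']}"
end
puts "DATABASE_URL=#{ENV['DATABASE_URL'].inspect}"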
I get the error Excon::Error::Socket (getaddrinfo: Name or service not known (SocketError)) when trying to upload a file through CarrierWave with fog-aws to MinIO.
My docker-compose.yml:
version: '3'
services:
  minio:
    image: minio/minio
    deploy:
      resources:
        limits:
          memory: 256m
    volumes:
      - 'minio:/var/lib/minio'
    environment:
      - "MINIO_ACCESS_KEY=development"
      - "MINIO_SECRET_KEY=development"
    ports:
      - "9000:9000"
    command: server /export
  rails:
    build: .
    command: bash -c 'rm -f /test/tmp/pids/server.pid && bundle && bundle exec rails s -p 3000 -b 0.0.0.0'
    volumes:
      - .:/test
    ports:
      - "3000:3000"
    depends_on:
      - minio
volumes:
  minio:
CarrierWave initializer:
CarrierWave.configure do |config|
config.fog_provider = 'fog/aws'
config.fog_credentials = {
provider: 'AWS',
aws_access_key_id: 'development',
aws_secret_access_key: 'development',
region: 'us-east-1',
host: 'minio',
endpoint: 'http://localhost:9000'
}
config.fog_directory = 'test'
config.fog_public = false
# config.fog_attributes = { cache_control: "public, max-age=#{365.day.to_i}" } # optional, defaults to {}
end
Your CarrierWave configuration inside the Docker container should point at the Compose service's DNS name: within the Compose network, localhost resolves to the Rails container itself, so the endpoint must use the minio service name. In your case the following change should work:
CarrierWave.configure do |config|
config.fog_provider = 'fog/aws'
config.fog_credentials = {
provider: 'AWS',
aws_access_key_id: 'development',
aws_secret_access_key: 'development',
region: 'us-east-1',
host: 'minio',
endpoint: 'http://minio:9000'
}
config.fog_directory = 'test'
config.fog_public = false
# config.fog_attributes = { cache_control: "public, max-age=#{365.day.to_i}" } # optional, defaults to {}
end
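If uploads still fail after switching the endpoint, MinIO usually also needs path-style bucket addressing; a small hedged addition to the same initializer (path_style is a fog-aws credential option, so verify it against the fog-aws version in use):
CarrierWave.configure do |config|
  # MinIO serves buckets as http://minio:9000/<bucket>/..., not as subdomains,
  # so ask fog-aws for path-style URLs on top of the credentials set above.
  config.fog_credentials = config.fog_credentials.merge(path_style: true)
end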