Can't upload file through carrierwave fog-aws to minio (docker-compose) - ruby-on-rails

I get the error Excon::Error::Socket (getaddrinfo: Name or service not known (SocketError)) when trying to upload a file through carrierwave/fog-aws to minio.
Docker compose
version: '3'
services:
  minio:
    image: minio/minio
    deploy:
      resources:
        limits:
          memory: 256m
    volumes:
      - 'minio:/var/lib/minio'
    environment:
      - "MINIO_ACCESS_KEY=development"
      - "MINIO_SECRET_KEY=development"
    ports:
      - "9000:9000"
    command: server /export
  rails:
    build: .
    command: bash -c 'rm -f /test/tmp/pids/server.pid && bundle && bundle exec rails s -p 3000 -b 0.0.0.0'
    volumes:
      - .:/test
    ports:
      - "3000:3000"
    depends_on:
      - minio
volumes:
  minio:
Carrierwave initializer
CarrierWave.configure do |config|
  config.fog_provider = 'fog/aws'
  config.fog_credentials = {
    provider: 'AWS',
    aws_access_key_id: 'development',
    aws_secret_access_key: 'development',
    region: 'us-east-1',
    host: 'minio',
    endpoint: 'http://localhost:9000'
  }
  config.fog_directory = 'test'
  config.fog_public = false
  # config.fog_attributes = { cache_control: "public, max-age=#{365.day.to_i}" } # optional, defaults to {}
end

Your CarrierWave configuration inside the Docker container should point to the Compose service's DNS name. In your case the following change should work:
CarrierWave.configure do |config|
  config.fog_provider = 'fog/aws'
  config.fog_credentials = {
    provider: 'AWS',
    aws_access_key_id: 'development',
    aws_secret_access_key: 'development',
    region: 'us-east-1',
    host: 'minio',
    endpoint: 'http://minio:9000'
  }
  config.fog_directory = 'test'
  config.fog_public = false
  # config.fog_attributes = { cache_control: "public, max-age=#{365.day.to_i}" } # optional, defaults to {}
end
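If uploads still fail once the endpoint resolves, it may also be worth enabling path-style bucket addressing: MinIO serves buckets under the endpoint path by default rather than as virtual-host subdomains, and fog-aws exposes a path_style option in the credentials hash for exactly that. This is only a hedged sketch of the same initializer with that flag added (the endpoint alone is usually enough, so the separate host entry is dropped here):

CarrierWave.configure do |config|
  config.fog_provider = 'fog/aws'
  config.fog_credentials = {
    provider: 'AWS',
    aws_access_key_id: 'development',
    aws_secret_access_key: 'development',
    region: 'us-east-1',
    endpoint: 'http://minio:9000',
    path_style: true # address the bucket as http://minio:9000/test/... instead of http://test.minio:9000/...
  }
  config.fog_directory = 'test'
  config.fog_public = false
end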

Related

AnyCable request.params is empty in production

I have a Docker setup with Rails and Anycable. Only in production does request.params return nothing. In development this is not an issue.
If I try to make a connection to wss://api.test.to:8443/live?uid=yyy&token=xxx, the connection attempt is made, but in my connection.rb request.params always returns an empty object.
This is my fairly standard connection.rb.
module ApplicationCable
  class Connection < ActionCable::Connection::Base
    identified_by :current_user

    def connect
      self.current_user = find_verified_user
    end

    private

    def find_verified_user
      auth_header = request.params[:token]
      uid_header = request.params[:uid]
      logger.debug(request.params[:token]) # logs nothing
      logger.debug(request.to_yaml) # logs request object with no parameters included.
      begin
        decoded = JsonWebToken.decode(auth_header)
      rescue JWT::VerificationError, JWT::ExpiredSignature, JWT::DecodeError => error_string
        logger.debug("AUTH ERROR: " + error_string.to_s)
        return reject_unauthorized_connection # If decoding the JWT failed, reject auth.
      end
      user_decoded = User.find(decoded[:user_id])
      if user_decoded.id === uid_header
        return user_decoded
      else
        return reject_unauthorized_connection
      end
    end
  end
end
docker-compose.yml:
version: '3.8'
services:
  app:
    image: app-backend
    build:
      context: .
      dockerfile: Dockerfile
    command: bundle exec rails s -b 0.0.0.0
    depends_on:
      - database
      - redis
    volumes:
      - .:/app:cached
      - gem_cache:/usr/local/bundle/gems:cached
    env_file: production.env
    environment:
      RAILS_ENV: production
  database:
    image: postgres:13-alpine
    ports:
      - '5432:5432'
    volumes:
      - db_data:/var/lib/postgresql/data
      - ./init.sql:/docker-entrypoint-initdb.d/init.sql:delegated
    healthcheck:
      test: pg_isready -U postgres -h 127.0.0.1
      interval: 5s
  redis:
    image: redis:alpine
    volumes:
      - redis_data:/data
    ports:
      - 6379
    healthcheck:
      test: redis-cli ping
      interval: 1s
      timeout: 3s
      retries: 30
  rpc:
    entrypoint: ["bundle", "exec", "anycable"]
    build:
      context: .
    volumes:
      - .:/app:cached
      - gem_cache:/usr/local/bundle/gems:cached
    env_file: production.env
    environment:
      RAILS_ENV: production
      ANYCABLE_REDIS_URL: redis://redis:6379/0
      ANYCABLE_RPC_HOST: 0.0.0.0:50051
      ANYCABLE_DEBUG: 0
    depends_on:
      app:
        condition: service_started
      database:
        condition: service_healthy
      redis:
        condition: service_healthy
  anycable:
    image: anycable/anycable-go:latest-alpine
    entrypoint: ["anycable-go", "-ssl_cert=/var/ssl/certbot/conf/live/api.pasta.to/fullchain.pem", "-ssl_key=/var/ssl/certbot/conf/live/api.pasta.to/privkey.pem", "--path=/live", "--log_level=debug", "--debug"]
    ports:
      - '8443:8443'
    environment:
      ANYCABLE_HOST: "0.0.0.0"
      ANYCABLE_PORT: 8443
      ANYCABLE_REDIS_URL: redis://redis:6379/0
      ANYCABLE_RPC_HOST: rpc:50051
      ANYCABLE_DEBUG: 1
    volumes:
      - ./ssl:/var/ssl
    depends_on:
      rpc:
        condition: service_started
      app:
        condition: service_started
  nginx:
    image: ymuski/nginx-quic:latest
    command: "/bin/sh -c 'while :; do sleep 6h & wait $${!}; nginx -s reload; done & nginx -g \"daemon off;\"'"
    volumes:
      - ./ssl:/var/ssl
      - ./nginx/prod/nginx.conf:/etc/nginx/nginx.conf
      - ./public:/var/web
    ports:
      - 80:80
      - 443:443
    depends_on:
      app:
        condition: service_started
volumes:
  gem_cache:
  db_data:
  redis_data:
  ssl_root:
production.rb
Rails.application.configure do
  config.cache_classes = true
  config.eager_load = true
  config.consider_all_requests_local = true
  config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?
  config.active_storage.service = :local
  config.action_cable.url = 'wss://api.test.to:8443/live'
  config.action_cable.disable_request_forgery_protection = true
  config.log_level = :debug
  config.logger = Logger.new(STDOUT)
  config.log_tags = [ :request_id ]
  config.cache_store = :redis_cache_store, {driver: :hiredis, url: "redis://redis:6379/2"}
  config.action_mailer.perform_caching = false
  config.i18n.fallbacks = true
  config.active_support.deprecation = :notify
  config.log_formatter = ::Logger::Formatter.new
  if ENV["RAILS_LOG_TO_STDOUT"].present?
    logger = ActiveSupport::Logger.new(STDOUT)
    logger.formatter = config.log_formatter
    config.logger = ActiveSupport::TaggedLogging.new(logger)
  end
  config.active_record.dump_schema_after_migration = false
  config.active_record.verbose_query_logs = true
end
This is what's logged when a websocket connection is made:
https://user-images.githubusercontent.com/15372551/124969065-8e8e4680-e026-11eb-912d-2aa8e98f5607.png
I was able to resolve this by upgrading to the latest version of anycable-rails (> 1.0).
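In practice that upgrade amounts to bumping the gem constraint and rebundling inside the app image; a minimal sketch, assuming anycable-rails is pinned in the Gemfile:

# Gemfile
gem 'anycable-rails', '~> 1.0'

# then rebuild the bundle used by the app and rpc services, e.g.
# bundle update anycable-rails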

Accessing file downloads from containerized RSpec/Capybara and Selenium Chrome

I would like to run an RSpec/Capybara test suite in Docker. This test suite performs a file download.
If I run the test suite with rspec, I am able to access the downloaded file.
If I run both rspec and selenium chrome as containers, I cannot figure out how to access the downloaded file.
.ruby-version
2.7.0
Gemfile
source "https://rubygems.org"
gem 'rspec'
gem 'capybara'
gem 'capybara-webmock'
gem 'colorize'
gem 'webdrivers'
#gem 'chromedriver-helper'
gem 'selenium-webdriver'
gem 'byebug'
spec/spec_helper.rb
require 'colorize'
require 'capybara/dsl'
require 'capybara/rspec'
require 'byebug'

RSpec.configure do |config|
  config.color = true
  config.tty = true
  config.formatter = :documentation
  config.include Capybara::DSL
end

def create_web_session
  Capybara.app_host = 'https://github.com'
  Capybara.run_server = false # don't start Rack
  if ENV['CHROME_URL']
    Capybara.register_driver :selenium_chrome_headless do |app|
      args = [
        '--no-default-browser-check',
        '--start-maximized',
        '--headless',
        '--disable-dev-shm-usage',
        '--whitelisted-ips'
      ]
      caps = Selenium::WebDriver::Remote::Capabilities.chrome("chromeOptions" => {"args" => args})
      Capybara::Selenium::Driver.new(
        app,
        browser: :remote,
        desired_capabilities: caps,
        url: "http://chrome:4444/wd/hub"
      )
    end
  end
  @session = Capybara::Session.new(:selenium_chrome_headless)
  # @session = Capybara::Session.new(:selenium_chrome)
end
spec/test/demo_spec.rb
require 'spec_helper.rb'
require 'webdrivers/chromedriver'

sleep 1

RSpec.describe 'basic_tests', type: :feature do
  before(:each) do
    @session = create_web_session
  end

  it 'Load page' do
    @session.visit '/docker/compose/releases/tag/1.27.0'
    @session.find_link('Source code (zip)')
    @session.click_link('Source code (zip)')
    sleep 3
    f = File.join('compose-1.27.0.zip')
    expect(File.exists?(f)).to be true
    File.delete(f)
  end
end
Dockerfile
FROM ruby:2.7
RUN gem install bundler
COPY Gemfile Gemfile
COPY Gemfile.lock Gemfile.lock
RUN bundle install
COPY . .
RUN chmod 777 .
CMD ["bundle", "exec", "rspec", "spec"]
docker-compose.yml
version: '3.7'
networks:
  mynet:
services:
  rspec-chrome:
    container_name: rspec-chrome
    image: rspec-chrome
    build:
      context: .
      dockerfile: Dockerfile
    environment:
      CHROME_URL: http://chrome:4444/wd/hub
    stdin_open: true
    tty: true
    networks:
      mynet:
    depends_on:
      - chrome
  chrome:
    container_name: chrome
    image: selenium/standalone-chrome
    networks:
      mynet:
    volumes:
      - /dev/shm:/dev/shm
Output when running rspec
basic_tests
Load page
Finished in 8.45 seconds (files took 6.79 seconds to load)
1 example, 0 failures
Output when running docker-compose up -d --build
docker logs -f rspec-chrome
basic_tests
Load page (FAILED - 1)
Failures:
1) basic_tests Load page
Failure/Error: expect(File.exists?(f)).to be true
expected true
got false
# /spec/test/demo_spec.rb:17:in `block (2 levels) in <top (required)>'
When you have Chrome download files, they are downloaded inside the Chrome container. To access them from the container running the tests, you probably want to create a shared volume between the two containers and mount it as Chrome's download directory.
The following modifications resolved my issue.
spec/spec_helper.rb
Pass the following prefs in chromeOptions
"prefs" => {
'download.default_directory' => '/tmp',
'download.directory_upgrade' => true,
'download.prompt_for_download' => false
}
Here is the complete file
require 'colorize'
require 'capybara/dsl'
require 'capybara/rspec'
require 'byebug'

RSpec.configure do |config|
  config.color = true
  config.tty = true
  config.formatter = :documentation
  config.include Capybara::DSL
end

def create_web_session
  Capybara.app_host = 'https://github.com'
  Capybara.run_server = false # don't start Rack
  if ENV['CHROME_URL']
    Capybara.register_driver :selenium_chrome_headless do |app|
      args = [
        '--no-default-browser-check',
        '--start-maximized',
        '--headless',
        '--disable-dev-shm-usage',
        '--whitelisted-ips'
      ]
      caps = Selenium::WebDriver::Remote::Capabilities.chrome("chromeOptions" => {
        "args" => args,
        "prefs" => {
          'download.default_directory' => '/tmp',
          'download.directory_upgrade' => true,
          'download.prompt_for_download' => false
        }
      })
      Capybara::Selenium::Driver.new(
        app,
        browser: :remote,
        desired_capabilities: caps,
        url: ENV['CHROME_URL']
      )
    end
  end
  @session = Capybara::Session.new(:selenium_chrome_headless)
  # @session = Capybara::Session.new(:selenium_chrome)
end
spec/test/demo_spec.rb
Change directory to /tmp and look for the download in /tmp
require 'spec_helper.rb'
require 'webdrivers/chromedriver'

sleep 1

RSpec.describe 'basic_tests', type: :feature do
  before(:each) do
    @session = create_web_session
    Dir.chdir "/tmp"
  end

  it 'Load page' do
    @session.visit '/docker/compose/releases/tag/1.27.0'
    @session.find_link('Source code (zip)')
    @session.click_link('Source code (zip)')
    sleep 3
    f = File.join('/tmp', 'compose-1.27.0.zip')
    expect(File.exists?(f)).to be true
    File.delete(f)
  end
end
docker-compose.yml
Share /tmp as a docker volume between the rspec and chrome containers
version: '3.7'
networks:
  mynet:
volumes:
  downloads:
services:
  rspec-chrome:
    container_name: rspec-chrome
    image: rspec-chrome
    build:
      context: .
      dockerfile: Dockerfile
    environment:
      CHROME_URL: http://chrome:4444/wd/hub
    stdin_open: true
    tty: true
    networks:
      mynet:
    depends_on:
      - chrome
    volumes:
      - downloads:/tmp
  chrome:
    container_name: chrome
    image: selenium/standalone-chrome
    networks:
      mynet:
    volumes:
      - /dev/shm:/dev/shm
      - downloads:/tmp
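One follow-up worth mentioning: a fixed sleep 3 before checking for the file can get flaky once the download travels between containers. A small polling helper is more robust; this is only a sketch under my own naming (wait_for_download is not part of the original suite) and relies on Chrome writing in-progress downloads with a .crdownload suffix:

require 'timeout'

# Hypothetical helper: block until Chrome has finished writing the file,
# or raise Timeout::Error after the given number of seconds.
def wait_for_download(path, timeout: 15)
  Timeout.timeout(timeout) do
    sleep 0.5 until File.exist?(path) && !File.exist?("#{path}.crdownload")
  end
end

# Usage inside the example, replacing the fixed sleep:
# wait_for_download('/tmp/compose-1.27.0.zip')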

ActionCable not working with Devise in production

I can't get Action Cable running properly on my Ubuntu server with Docker.
It seems to be some kind of authentication error with Devise; this is my terminal log:
There was an exception - NoMethodError(undefined method `user' for nil:NilClass)
cable_1 | /var/www/acim/public/app/channels/application_cable/connection.rb:17:in `find_verified_user'
This is my /app/channels/application_cable/connection.rb
module ApplicationCable
  class Connection < ActionCable::Connection::Base
    identified_by :current_user

    def connect
      self.current_user = find_verified_user
      logger.add_tags 'ActionCable', current_user.email
    end

    protected

    def find_verified_user
      if (current_user = env['warden'].user)
        # if current_user = User.find_by(id: cookies.signed[:user_id])
        current_user
      else
        reject_unauthorized_connection
      end
    end
  end
end
And here is the config/environments/production.rb:
Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# Code is not reloaded between requests.
config.cache_classes = true
# Eager load code on boot. This eager loads most of Rails and
# your application in memory, allowing both threaded web servers
# and those relying on copy on write to perform better.
# Rake tasks automatically ignore this option for performance.
config.eager_load = true
# Full error reports are disabled and caching is turned on.
config.consider_all_requests_local = true
config.action_controller.perform_caching = true
# Ensures that a master key has been made available in either ENV["RAILS_MASTER_KEY"]
# or in config/master.key. This key is used to decrypt credentials (and other encrypted files).
# config.require_master_key = true
# Disable serving static files from the `/public` folder by default since
# Apache or NGINX already handles this.
config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?
# Compress JavaScripts and CSS.
config.assets.js_compressor = :uglifier
# config.assets.css_compressor = :sass
# Do not fallback to assets pipeline if a precompiled asset is missed.
config.assets.compile = false
# `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb
# Enable serving of images, stylesheets, and JavaScripts from an asset server.
# config.action_controller.asset_host = 'http://assets.example.com'
# Specifies the header that your server uses for sending files.
# config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX
# Store uploaded files on the local file system (see config/storage.yml for options)
config.active_storage.service = :local
# Mount Action Cable outside main process or domain
# config.action_cable.mount_path = nil
config.action_cable.url = 'ws://192.168.99.100/cable'
config.action_cable.allowed_request_origins = [ 'http://192.168.99.100', /http:\/\/192.168.99.100.*/, 'http://localhost' ]
#config.action_cable.url = [/ws:\/\/*/, /wss:\/\/*/]
#config.action_cable.allowed_request_origins = [/http:\/\/*/, /https:\/\/*/]
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
# config.force_ssl = true
# Use the lowest log level to ensure availability of diagnostic information
# when problems arise.
config.log_level = :debug
# Prepend all log lines with the following tags.
config.log_tags = [ :request_id ]
# Use a different cache store in production.
# config.cache_store = :mem_cache_store
# Use a real queuing backend for Active Job (and separate queues per environment)
# config.active_job.queue_adapter = :resque
# config.active_job.queue_name_prefix = "ACIM_#{Rails.env}"
config.action_mailer.perform_caching = false
# Ignore bad email addresses and do not raise email delivery errors.
# Set this to true and configure the email server for immediate delivery to raise delivery errors.
# config.action_mailer.raise_delivery_errors = false
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation cannot be found).
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners.
config.active_support.deprecation = :notify
# Use default logging formatter so that PID and timestamp are not suppressed.
config.log_formatter = ::Logger::Formatter.new
# Use a different logger for distributed setups.
# require 'syslog/logger'
# config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')
if ENV["RAILS_LOG_TO_STDOUT"].present?
logger = ActiveSupport::Logger.new(STDOUT)
logger.formatter = config.log_formatter
config.logger = ActiveSupport::TaggedLogging.new(logger)
end
# Do not dump schema after migrations.
config.active_record.dump_schema_after_migration = false
# ne pas directement servir les fichiers public
# en utilisant ruby, mais passons par NGINX
config.public_file_server.enabled = false
config.assets.js_compressor = Uglifier.new(harmony: true)
end
Rails.application.config.assets.precompile += %w( *.js ^[^_]*.css *.css.erb )
Here is my docker-compose.yml file:
version: '3'
services:
  db:
    image: postgres:9.6
    environment:
      - ACIM_DATABASE_PASSWORD=ACIM_2018
    volumes:
      - 'db:/var/lib/postgresql/data'
    env_file:
      - '.env'
    expose:
      - '5432'
    ports:
      - "7000:5432"
  nginx:
    image: nginx:latest
    container_name: production_nginx
    volumes:
      - ./config/nginx/nginx.conf:/etc/nginx/nginx.conf
      - public-content:/var/www/acim/public
    ports:
      - 80:80
      - 443:443
    links:
      - web
  redis:
    image: redis
    command: redis-server
    volumes:
      - 'redis:/data'
    ports:
      - "6379"
  sidekiq:
    build: .
    #command: sidekiq -C config/sidekiq.yml
    command: bundle exec sidekiq
    volumes:
      - .:/ACIM
    links:
      - db
      - redis
    depends_on:
      - db
      - redis
    env_file:
      - '.env'
  web:
    build: .
    # command: bundle exec rails s -p 3000 -b '0.0.0.0'
    volumes:
      - bundle_cache:/bundle
      - public-content:/var/www/acim/public
      - .:/ACIM
    ports:
      - "5000:3000"
    depends_on:
      - db
      - redis
    env_file:
      - '.env'
  cable:
    depends_on:
      - 'redis'
    build: .
    command: puma -p 28080 cable/config.ru
    ports:
      - '28080:28080'
    volumes:
      - '.:/ACIM'
    env_file:
      - '.env'
volumes:
  bundle_cache:
  redis:
  db:
  public-content:
Please help, this has been killing me for 2 days now!
Thanks in advance.
I have a working setup using the Warden cookie variant that you have commented out, plus a warden_hooks initializer to manage the creation and invalidation of the cookie.
/app/channels/application_cable/connection.rb
module ApplicationCable
  class Connection < ActionCable::Connection::Base
    identified_by :current_user

    def connect
      self.current_user = find_verified_user
    end

    private

    def find_verified_user
      if current_user = User.find_by(id: cookies.signed['user_token'])
        current_user.id
      else
        reject_unauthorized_connection
      end
    end
  end
end
/app/config/initializers/warden_hooks.rb
# frozen_string_literal: true

# Set user_token cookie after sign in
Warden::Manager.after_set_user do |user, auth, opts|
  scope = opts[:scope]
  auth.cookies.signed["#{scope}_token"] = user.id
end

# Invalidate user.id cookie on sign out
Warden::Manager.before_logout do |user, auth, opts|
  scope = opts[:scope]
  auth.cookies.signed["#{scope}_token"] = nil
end
Finally I was able to get everything working after some changes, so in case it can help someone, here is what worked for me:
/app/channels/application_cable/connection.rb
module ApplicationCable
  class Connection < ActionCable::Connection::Base
    identified_by :current_user

    def connect
      # This is a websocket so we have no warden and no session here
      # How to reuse the login made with devise?
      # http://www.rubytutorial.io/actioncable-devise-authentication/
      self.current_user = find_verified_user
      logger.info("current_user: #{self.current_user.inspect}")
      logger.add_tags "ActionCable", current_user.email
    end

    protected

    def find_verified_user
      verified_user = User.find_by(id: cookies.signed['user.id'])
      if verified_user && cookies.signed['user.expires_at'] > Time.now
        verified_user
      else
        reject_unauthorized_connection
      end
    end
  end
end
/app/config/initializers/warden_hooks.rb
# http://www.rubytutorial.io/actioncable-devise-authentication/
Warden::Manager.after_set_user do |user, auth, opts|
  scope = opts[:scope]
  auth.cookies.signed["#{scope}.id"] = user.id
  auth.cookies.signed["#{scope}.expires_at"] = 30.minutes.from_now
end

Warden::Manager.before_logout do |user, auth, opts|
  scope = opts[:scope]
  auth.cookies.signed["#{scope}.id"] = nil
  auth.cookies.signed["#{scope}.expires_at"] = nil
end
config/environments/production.rb
Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# Code is not reloaded between requests.
config.cache_classes = true
# Eager load code on boot. This eager loads most of Rails and
# your application in memory, allowing both threaded web servers
# and those relying on copy on write to perform better.
# Rake tasks automatically ignore this option for performance.
config.eager_load = true
# Full error reports are disabled and caching is turned on.
config.consider_all_requests_local = true
config.action_controller.perform_caching = true
# Ensures that a master key has been made available in either ENV["RAILS_MASTER_KEY"]
# or in config/master.key. This key is used to decrypt credentials (and other encrypted files).
# config.require_master_key = true
# Disable serving static files from the `/public` folder by default since
# Apache or NGINX already handles this.
config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?
# Compress JavaScripts and CSS.
config.assets.js_compressor = :uglifier
# config.assets.css_compressor = :sass
# Do not fallback to assets pipeline if a precompiled asset is missed.
config.assets.compile = false
# `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb
# Enable serving of images, stylesheets, and JavaScripts from an asset server.
# config.action_controller.asset_host = 'http://assets.example.com'
# Specifies the header that your server uses for sending files.
# config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX
# Store uploaded files on the local file system (see config/storage.yml for options)
config.active_storage.service = :local
# Mount Action Cable outside main process or domain
# config.action_cable.mount_path = nil
#config.action_cable.url = 'ws://192.168.99.100/cable'
#config.action_cable.allowed_request_origins = [ 'http://192.168.99.100', /http:\/\/192.168.99.100.*/, 'http://localhost' ]
config.action_cable.url = 'ws://192.168.99.100/cable'
config.action_cable.allowed_request_origins = [/http:\/\/*/, /https:\/\/*/]
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
# config.force_ssl = true
# Use the lowest log level to ensure availability of diagnostic information
# when problems arise.
config.log_level = :debug
# Prepend all log lines with the following tags.
config.log_tags = [ :request_id ]
# Use a different cache store in production.
# config.cache_store = :mem_cache_store
# Use a real queuing backend for Active Job (and separate queues per environment)
# config.active_job.queue_adapter = :resque
# config.active_job.queue_name_prefix = "ACIM_#{Rails.env}"
config.action_mailer.perform_caching = false
# Ignore bad email addresses and do not raise email delivery errors.
# Set this to true and configure the email server for immediate delivery to raise delivery errors.
# config.action_mailer.raise_delivery_errors = false
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation cannot be found).
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners.
config.active_support.deprecation = :notify
# Use default logging formatter so that PID and timestamp are not suppressed.
config.log_formatter = ::Logger::Formatter.new
# Use a different logger for distributed setups.
# require 'syslog/logger'
# config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')
if ENV["RAILS_LOG_TO_STDOUT"].present?
logger = ActiveSupport::Logger.new(STDOUT)
logger.formatter = config.log_formatter
config.logger = ActiveSupport::TaggedLogging.new(logger)
end
# Do not dump schema after migrations.
config.active_record.dump_schema_after_migration = false
# ne pas directement servir les fichiers public
# en utilisant ruby, mais passons par NGINX
config.public_file_server.enabled = false
config.assets.js_compressor = Uglifier.new(harmony: true)
end
Rails.application.config.assets.precompile += %w( *.js ^[^_]*.css *.css.erb )
config/initializers/sidekiq.rb
Sidekiq.configure_server do |config|
  config.redis = { url: 'redis://192.168.99.100:6379' }
end

Sidekiq.configure_client do |config|
  config.redis = { url: 'redis://192.168.99.100:6379' }
end
Dockerfile
FROM ruby:2.5.1
# Install dependencies
RUN apt-get update -qq && apt-get install -y build-essential libpq-dev nodejs
ENV RAILS_ROOT /var/www/acim/public
RUN mkdir -p $RAILS_ROOT
RUN mkdir -p $RAILS_ROOT/log
RUN rm -rf /var/www/acim/public/tmp/pids/server.pid
# Set working directory, where the commands will be ran:
WORKDIR $RAILS_ROOT
# Setting env up
ENV RAILS_ENV='production'
ENV RACK_ENV='production'
# Adding gems
COPY Gemfile Gemfile
COPY Gemfile.lock Gemfile.lock
RUN bundle install --jobs 20 --retry 5 --without development test
# Adding project files
COPY . .
RUN bundle exec rake assets:precompile
EXPOSE 3000
CMD ["bundle", "exec", "puma", "-C", "config/puma.rb"]
docker-compose.yml
version: '3'
services:
  db:
    image: postgres:9.6
    environment:
      - ACIM_DATABASE_PASSWORD=ACIM_2018
    volumes:
      - 'db:/var/lib/postgresql/data'
    env_file:
      - '.env'
    expose:
      - '5432'
    ports:
      - "7000:5432"
  nginx:
    image: nginx:latest
    container_name: production_nginx
    volumes:
      - ./config/nginx/nginx.conf:/etc/nginx/nginx.conf
      - public-content:/var/www/acim/public
    ports:
      - 80:80
      - 443:443
    links:
      - web
  # maildev:
  #   image: djfarrelly/maildev
  #   ports:
  #     - "2000:80"
  redis:
    image: redis
    command: redis-server
    volumes:
      - 'redis:/data'
    ports:
      - "6379:6379"
  sidekiq:
    build: .
    #command: sidekiq -C config/sidekiq.yml
    command: bundle exec sidekiq
    volumes:
      - .:/ACIM
    links:
      - db
      - redis
    depends_on:
      - db
      - redis
    env_file:
      - '.env'
  web:
    build: .
    volumes:
      - bundle_cache:/bundle
      - public-content:/var/www/acim/public
      - .:/ACIM
    ports:
      - "5000:3000"
    depends_on:
      - db
      - redis
    env_file:
      - '.env'
  cable:
    depends_on:
      - 'redis'
      - 'sidekiq'
    build: .
    command: puma -p 28080 cable/config.ru
    ports:
      - '28080:28080'
    volumes:
      - '.:/ACIM'
    env_file:
      - '.env'
volumes:
  bundle_cache:
  redis:
  db:
  public-content:
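Both compose files run Action Cable as a separate puma process via puma -p 28080 cable/config.ru. That rackup file is not shown in the question; for reference, a minimal sketch of the standard standalone Action Cable setup from the Rails guides (not the asker's actual file) looks like this:

# cable/config.ru -- minimal standalone Action Cable server (sketch)
require_relative '../config/environment'
Rails.application.eager_load!

run ActionCable.server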

RAILS_ENV=development rake db:create is not creating development database

I have a strange issue with a Rails app running in a Docker container. I run RAILS_ENV=development rake db:drop db:create and it drops/creates the test database but never the development one.
Rails 5.0.0
my config/database.yml is
default: &default
  adapter: postgresql
  encoding: unicode
  pool: <%= ENV.fetch("RAILS_MAX_THREADS") { 5 } %>

development:
  <<: *default
  database: appname-api_development
  username: appname
  password: appname
  host: <%= ENV['DOCKER_DB_HOST'] || 'localhost' %>

test:
  <<: *default
  database: appname-api_test<%= ENV['TEST_ENV_NUMBER'] %>
  username: appname
  password: appname
  host: <%= ENV['DOCKER_DB_HOST'] || 'localhost' %>
and this is what I get:
root@9176b57db829:/appname# RAILS_ENV=development rake db:drop db:create
Dropped database 'db'
Dropped database 'appname-api_test'
Created database 'db'
Created database 'appname-api_test'
This is running in a Docker container and there is no environment variable set for RAILS_ENV or RACK_ENV or anything like that.
This is my docker-compose.yml:
version: '2'
services:
  db:
    image: postgres
    restart: always
    environment:
      POSTGRES_USER: appname
      POSTGRES_PASSWORD: appname
    volumes:
      - "${HOME}/.postgres-data:/var/lib/postgresql/data"
  redis:
    image: redis:3.2
  api:
    build: .
    environment:
      DOCKER_DB_HOST: 'db'
      REDIS_PROVIDER: 'redis://redis:6379/1'
      REDIS_URL: 'redis://redis:6379/1'
      SMTP_HOST: 'localhost:3000'
    # command: bundle exec rails s -b "0.0.0.0"
    volumes:
      - "${PWD}:/appname"
    ports:
      - "3000:3000"
      - "4000:4000"
    depends_on:
      - db
      - redis
    networks:
      default:
        aliases:
          - api
And this is my config/environments/development.rb file:
# frozen_string_literal: true
Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# In the development environment your application's code is reloaded on
# every request. This slows down response time but is perfect for development
# since you don't have to restart the web server when you make code changes.
config.cache_classes = false
# Do not eager load code on boot.
config.eager_load = false
config.action_cable.url = ENV["ACTIONCABLE_URL"]
config.action_cable.allowed_request_origins = [ ENV["FRONTEND_ORIGIN"] ]
# Show full error reports.
config.consider_all_requests_local = true
# Enable/disable caching. By default caching is disabled.
if Rails.root.join("tmp/caching-dev.txt").exist?
config.action_controller.perform_caching = true
config.cache_store = :memory_store
config.public_file_server.headers = {
"Cache-Control" => "public, max-age=172800",
}
else
config.action_controller.perform_caching = false
config.cache_store = :null_store
end
# Don't care if the mailer can't send.
config.action_mailer.raise_delivery_errors = false
config.action_mailer.perform_caching = false
config.action_mailer.delivery_method = :letter_opener
# Print deprecation notices to the Rails logger.
config.active_support.deprecation = :log
# Raise an error on page load if there are pending migrations.
config.active_record.migration_error = :page_load
# Debug mode disables concatenation and preprocessing of assets.
# This option may cause significant delays in view rendering with a large
# number of complex assets.
config.assets.debug = true
# Suppress logger output for asset requests.
config.assets.quiet = true
# Raises error for missing translations
# config.action_view.raise_on_missing_translations = true
# Uses Guard and LiveReload to reload html/css changes automatically
config.middleware.insert_after ActionDispatch::Static, Rack::LiveReload
end
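For what it's worth, one way to see which database configuration Rails actually resolves inside the container (a diagnostic sketch I'm adding, not part of the original question) is to print the parsed development entry with rails runner from the api service:

# Run via: docker-compose run api bundle exec rails runner 'path/to/this_snippet.rb'
# (or paste the line into a rails console). In Rails 5.0,
# ActiveRecord::Base.configurations is a plain Hash of the parsed database.yml.
puts ActiveRecord::Base.configurations["development"]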

Configure Rails 3 for S3 bucket using fog.yml

I have a Rails 3.2 application passed on from a previous freelancer in my company. I would like to know how to configure the existing S3 bucket for my new EC2 instance.
Access to AWS is configured in fog.yml using the fog gem as well as the carrierwave gem.
What am I missing in this fog.yml file?
Fog.credentials_path = Rails.root.join('config/fog.yml')

CarrierWave.configure do |config|
  config.fog_credentials = {
  }
  config.fog_directory = "directory-name"
  config.fog_public = false
  config.fog_attributes = {'Cache-Control'=>'max-age=315576000'}
end
# config/initializers/carrierwave.rb
CarrierWave.configure do |config|
  config.fog_credentials = {
    :provider => 'AWS',
    :aws_access_key_id => 'aws_key',
    :aws_secret_access_key => 'aws_secret'
  }
  config.fog_directory = 'bucket1'
  config.fog_host = 'https://s3.amazonaws.com'
  config.fog_public = true
  config.fog_attributes = {'Cache-Control'=>'max-age=315576000'}
end
# app/uploader/image_uploader.rb
def store_dir
  "images/#{model.class.to_s.underscore}"
end

# app/views/pictures/show.html.erb
<%= image_tag @picture.image_url if @picture.image? %>
I'd take a look at these examples:
http://fog.io/about/getting_started.html
It looks like the format is basically:
development:
  aws_access_key_id: 'XXXXXXXXXXXXXXX'
  aws_secret_access_key: 'XXXXXXXXXXXXXXX'
  provider: 'AWS'

test:
  aws_access_key_id: 'XXXXXXXXXXXXXXX'
  aws_secret_access_key: 'XXXXXXXXXXXXXXX'
  provider: 'AWS'

# set these environment variables on your prod server
production:
  aws_access_key_id: <%= ENV['AWS_ACCESS_KEY_ID'] %>
  aws_secret_access_key: <%= ENV['AWS_SECRET_ACCESS_KEY'] %>
  provider: 'AWS'
Make sure to use spaces rather than tabs since it's a YAML file.
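If you prefer to keep the CarrierWave initializer in charge rather than relying on Fog.credentials_path, you can load the per-environment section of fog.yml yourself. This is only a sketch under my own assumptions (ERB-enabled YAML, string keys converted to symbols, and the bucket name from the question), not the app's actual initializer:

# config/initializers/carrierwave.rb (sketch)
require 'erb'
require 'yaml'

# Parse config/fog.yml, allowing ERB so the production ENV lookups work.
fog_settings = YAML.load(ERB.new(File.read(Rails.root.join('config', 'fog.yml'))).result)

CarrierWave.configure do |config|
  config.fog_credentials = fog_settings[Rails.env].symbolize_keys
  config.fog_directory   = 'directory-name'  # the existing S3 bucket name
  config.fog_public      = false
  config.fog_attributes  = { 'Cache-Control' => 'max-age=315576000' }
end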
