django.db.utils.ProgrammingError: can't adapt type 'dict' after I tried to add an HStoreField to my model - migrate

I am developing an app in Django.
I tried to add an HStoreField to my model and then deleted it, and now I can no longer run python manage.py migrate because I always get this error:
(met5) C:\Users\Tommaso\Django rest framework\Udemy Django\Metaglossario_Gestisco>python manage.py migrate
Operations to perform:
  Apply all migrations: admin, app_metaglossario, auth, contenttypes, sessions
Running migrations:
  Applying app_metaglossario.0045_model_node_data...Traceback (most recent call last):
  File "C:\Applicazioni_Tommaso\Phyton\lib\site-packages\django\db\backends\utils.py", line 84, in _execute
    return self.cursor.execute(sql, params)
psycopg2.ProgrammingError: can't adapt type 'dict'

The above exception was the direct cause of the following exception:

Traceback (most recent call last):
  File "manage.py", line 21, in <module>
    main()
  File "manage.py", line 17, in main
    execute_from_command_line(sys.argv)
  File "C:\Applicazioni_Tommaso\Phyton\lib\site-packages\django\core\management\__init__.py", line 381, in execute_from_command_line
    utility.execute()
  File "C:\Applicazioni_Tommaso\Phyton\lib\site-packages\django\core\management\__init__.py", line 375, in execute
    self.fetch_command(subcommand).run_from_argv(self.argv)
  File "C:\Applicazioni_Tommaso\Phyton\lib\site-packages\django\core\management\base.py", line 323, in run_from_argv
    self.execute(*args, **cmd_options)
  File "C:\Applicazioni_Tommaso\Phyton\lib\site-packages\django\core\management\base.py", line 364, in execute
    output = self.handle(*args, **options)
  File "C:\Applicazioni_Tommaso\Phyton\lib\site-packages\django\core\management\base.py", line 83, in wrapped
    res = handle_func(*args, **kwargs)
  File "C:\Applicazioni_Tommaso\Phyton\lib\site-packages\django\core\management\commands\migrate.py", line 234, in handle
    fake_initial=fake_initial,
  File "C:\Applicazioni_Tommaso\Phyton\lib\site-packages\django\db\migrations\executor.py", line 117, in migrate
    state = self._migrate_all_forwards(state, plan, full_plan, fake=fake, fake_initial=fake_initial)
  File "C:\Applicazioni_Tommaso\Phyton\lib\site-packages\django\db\migrations\executor.py", line 147, in _migrate_all_forwards
    state = self.apply_migration(state, migration, fake=fake, fake_initial=fake_initial)
  File "C:\Applicazioni_Tommaso\Phyton\lib\site-packages\django\db\migrations\executor.py", line 245, in apply_migration
    state = migration.apply(state, schema_editor)
  File "C:\Applicazioni_Tommaso\Phyton\lib\site-packages\django\db\migrations\migration.py", line 124, in apply
    operation.database_forwards(self.app_label, schema_editor, old_state, project_state)
  File "C:\Applicazioni_Tommaso\Phyton\lib\site-packages\django\db\migrations\operations\fields.py", line 112, in database_forwards
    field,
  File "C:\Applicazioni_Tommaso\Phyton\lib\site-packages\django\db\backends\base\schema.py", line 447, in add_field
    self.execute(sql, params)
  File "C:\Applicazioni_Tommaso\Phyton\lib\site-packages\django\db\backends\base\schema.py", line 137, in execute
    cursor.execute(sql, params)
  File "C:\Applicazioni_Tommaso\Phyton\lib\site-packages\django\db\backends\utils.py", line 99, in execute
    return super().execute(sql, params)
  File "C:\Applicazioni_Tommaso\Phyton\lib\site-packages\django\db\backends\utils.py", line 67, in execute
    return self._execute_with_wrappers(sql, params, many=False, executor=self._execute)
  File "C:\Applicazioni_Tommaso\Phyton\lib\site-packages\django\db\backends\utils.py", line 76, in _execute_with_wrappers
    return executor(sql, params, many, context)
  File "C:\Applicazioni_Tommaso\Phyton\lib\site-packages\django\db\backends\utils.py", line 84, in _execute
    return self.cursor.execute(sql, params)
  File "C:\Applicazioni_Tommaso\Phyton\lib\site-packages\django\db\utils.py", line 89, in __exit__
    raise dj_exc_value.with_traceback(traceback) from exc_value
  File "C:\Applicazioni_Tommaso\Phyton\lib\site-packages\django\db\backends\utils.py", line 84, in _execute
    return self.cursor.execute(sql, params)
django.db.utils.ProgrammingError: can't adapt type 'dict'
(met5) C:\Users\Tommaso\Django rest framework\Udemy Django\Metaglossario_Gestisco>python manage.py makemigrations
Migrations for 'app_metaglossario':
  app_metaglossario\migrations\0054_model_node.py
    - Create model model_node
(met5) C:\Users\Tommaso\Django rest framework\Udemy Django\Metaglossario_Gestisco>python manage.py migrate
(this second migrate attempt fails with exactly the same traceback as above, again ending in django.db.utils.ProgrammingError: can't adapt type 'dict')
It looks like the HStoreField is still having some effect on my database.
I tried to empty the model in which I put the HStoreField and then delete it, but nothing changed.
I even reset my database from Heroku, but the error keeps popping up.
How can I get rid of it?

The Django documentation (2.0 and above) clearly states that you need to set up the hstore extension in PostgreSQL. I was getting the same error; to solve it, try this:
sudo su - postgres                        # switch to the postgres user
psql your_database                        # connect to your database with psql
CREATE EXTENSION IF NOT EXISTS hstore;    -- create the hstore extension
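If your database is hosted on Heroku, as in the question, you can reach the same psql prompt through the Heroku CLI (this assumes the Heroku Postgres add-on and the heroku CLI are already configured for your app):
heroku pg:psql                            # open psql against the app's attached database
CREATE EXTENSION IF NOT EXISTS hstore;    -- create the hstore extension there as well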

Follow these steps to get it working:
In your settings, add django.contrib.postgres to INSTALLED_APPS.
Create a migration that runs before the migration using the hstore field, and copy the code below into that migration file.
from django.contrib.postgres.operations import HStoreExtension, UnaccentExtension
from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('main', '0001_initial'),
    ]

    operations = [
        # Create the PostgreSQL extensions before any field uses them.
        HStoreExtension(),
        UnaccentExtension(),
    ]
Create the migration that adds the hstore field(s) with python manage.py makemigrations.
Run python manage.py migrate.
This worked for me!
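For reference, here is a minimal sketch of what a model using the field might look like once the extension migration is in place; the model and field names below are only illustrative and are not taken from the question:

from django.contrib.postgres.fields import HStoreField
from django.db import models


class ModelNode(models.Model):
    # Key/value pairs of strings; requires the hstore extension
    # created by the HStoreExtension() migration above.
    data = HStoreField(null=True, blank=True)

After adding the field, python manage.py makemigrations generates the AddField migration, and python manage.py migrate should then apply it without the "can't adapt type 'dict'" error.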

Related

Cannot create a pipenv virtual environment on Ubuntu Linux

Hello everyone. I have a problem launching a virtual environment with pipenv. I never had this problem on Windows, but I recently migrated to Ubuntu Linux and cannot solve it. I would be grateful if someone could help me.
Creating a virtualenv for this project...
Pipfile: /media/sasan/F/my projects/Djprojects/newprj/Pipfile
Using /usr/bin/python3 (3.10.4) to create virtualenv...
⠸ Creating virtual environment...created virtual environment CPython3.10.4.final.0-64 in 106ms
creator CPython3Posix(dest=/home/sasan/.local/share/virtualenvs/newprj-cXkKfH7p, clear=False, no_vcs_ignore=False, global=False)
seeder FromAppData(download=False, pip=bundle, setuptools=bundle, wheel=bundle, via=copy, app_data_dir=/home/sasan/.local/share/virtualenv)
added seed packages: pip==22.2.2, setuptools==65.3.0, wheel==0.37.1
activators BashActivator,CShellActivator,FishActivator,NushellActivator,PowerShellActivator,PythonActivator
✔ Successfully created virtual environment!
Traceback (most recent call last):
File "/usr/bin/pipenv", line 33, in <module>
sys.exit(load_entry_point('pipenv==11.9.0', 'console_scripts', 'pipenv')())
File "/home/sasan/.local/lib/python3.10/site-packages/pipenv/vendor/click/core.py", line 1128, in __call__
return self.main(*args, **kwargs)
File "/home/sasan/.local/lib/python3.10/site-packages/pipenv/cli/options.py", line 57, in main
return super().main(*args, **kwargs, windows_expand_args=False)
File "/home/sasan/.local/lib/python3.10/site-packages/pipenv/vendor/click/core.py", line 1053, in main
rv = self.invoke(ctx)
File "/home/sasan/.local/lib/python3.10/site-packages/pipenv/vendor/click/core.py", line 1659, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/home/sasan/.local/lib/python3.10/site-packages/pipenv/vendor/click/core.py", line 1395, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/home/sasan/.local/lib/python3.10/site-packages/pipenv/vendor/click/core.py", line 754, in invoke
return __callback(*args, **kwargs)
File "/home/sasan/.local/lib/python3.10/site-packages/pipenv/vendor/click/decorators.py", line 84, in new_func
return ctx.invoke(f, obj, *args, **kwargs)
File "/home/sasan/.local/lib/python3.10/site-packages/pipenv/vendor/click/core.py", line 754, in invoke
return __callback(*args, **kwargs)
File "/home/sasan/.local/lib/python3.10/site-packages/pipenv/cli/command.py", line 397, in shell
do_shell(
File "/home/sasan/.local/lib/python3.10/site-packages/pipenv/core.py", line 2520, in do_shell
ensure_project(
File "/home/sasan/.local/lib/python3.10/site-packages/pipenv/core.py", line 525, in ensure_project
ensure_virtualenv(
File "/home/sasan/.local/lib/python3.10/site-packages/pipenv/core.py", line 458, in ensure_virtualenv
do_create_virtualenv(
File "/home/sasan/.local/lib/python3.10/site-packages/pipenv/core.py", line 959, in do_create_virtualenv
project._environment = Environment(
File "/home/sasan/.local/lib/python3.10/site-packages/pipenv/environment.py", line 79, in __init__
self._base_paths = self.get_paths()
File "/home/sasan/.local/lib/python3.10/site-packages/pipenv/environment.py", line 390, in get_paths
c = subprocess_run(command)
File "/home/sasan/.local/lib/python3.10/site-packages/pipenv/utils/processes.py", line 75, in subprocess_run
return subprocess.run(
File "/usr/lib/python3.10/subprocess.py", line 501, in run
with Popen(*popenargs, **kwargs) as process:
File "/usr/lib/python3.10/subprocess.py", line 966, in __init__
self._execute_child(args, executable, preexec_fn, close_fds,
File "/usr/lib/python3.10/subprocess.py", line 1842, in _execute_child
raise child_exception_type(errno_num, err_msg, err_filename)
FileNotFoundError: [Errno 2] No such file or directory: '/home/sasan/.local/share/virtualenvs/newprj-cXkKfH7p/bin/python'
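Not part of the original post, but two things in the traceback may be worth checking: whether the interpreter path pipenv complains about actually exists, and the fact that the /usr/bin/pipenv entry point (pipenv==11.9.0) is loading modules from a much newer pipenv under ~/.local, which suggests two conflicting installations. A hedged cleanup sketch:
ls /home/sasan/.local/share/virtualenvs/newprj-cXkKfH7p/bin/   # does the interpreter the traceback points at exist?
pipenv --rm                                                    # discard the half-created virtualenv (run inside the project directory)
python3 -m pip install --user --upgrade pipenv                 # make sure a single, current pipenv is used
python3 -m pipenv install                                      # recreate the environment from the Pipfile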

Superset: exporting and importing dashboards using the CLI (for version control)

Goal: My goal is to export all the dashboards from Superset so that I can save them as a backup, and to make sure that I can import them again.
Existing setup: I am cloning Superset from git and using docker-compose -f docker-compose-non-dev.yml up to set up and run Superset (following this documentation).
Issue:
Using the Superset CLI: following this documentation, I connect to the Docker container in which Superset is running and run superset export-dashboards, but it throws the following error:
Loaded your LOCAL configuration at [/app/docker/pythonpath_dev/superset_config.py]
logging was configured successfully
2021-10-20 06:54:49,727:INFO:superset.utils.logging_configurator:logging was configured successfully
2021-10-20 06:54:49,732:INFO:root:Configured event logger of type <class 'superset.utils.log.DBEventLogger'>
/usr/local/lib/python3.8/site-packages/flask_caching/__init__.py:201: UserWarning: Flask-Caching: CACHE_TYPE is set to null, caching is effectively disabled.
warnings.warn(
Starting export
2021-10-20 06:54:50,962:INFO:superset.utils.dashboard_import_export:Starting export
Traceback (most recent call last):
File "/usr/local/bin/superset", line 33, in <module>
sys.exit(load_entry_point('apache-superset', 'console_scripts', 'superset')())
File "/usr/local/lib/python3.8/site-packages/click/core.py", line 829, in __call__
return self.main(*args, **kwargs)
File "/usr/local/lib/python3.8/site-packages/flask/cli.py", line 586, in main
return super(FlaskGroup, self).main(*args, **kwargs)
File "/usr/local/lib/python3.8/site-packages/click/core.py", line 782, in main
rv = self.invoke(ctx)
File "/usr/local/lib/python3.8/site-packages/click/core.py", line 1259, in invoke
return _process_result(sub_ctx.command.invoke(sub_ctx))
File "/usr/local/lib/python3.8/site-packages/click/core.py", line 1066, in invoke
return ctx.invoke(self.callback, **ctx.params)
File "/usr/local/lib/python3.8/site-packages/click/core.py", line 610, in invoke
return callback(*args, **kwargs)
File "/usr/local/lib/python3.8/site-packages/click/decorators.py", line 21, in new_func
return f(get_current_context(), *args, **kwargs)
File "/usr/local/lib/python3.8/site-packages/flask/cli.py", line 426, in decorator
return __ctx.invoke(f, *args, **kwargs)
File "/usr/local/lib/python3.8/site-packages/click/core.py", line 610, in invoke
return callback(*args, **kwargs)
File "/usr/local/lib/python3.8/site-packages/click/decorators.py", line 21, in new_func
return f(get_current_context(), *args, **kwargs)
File "/usr/local/lib/python3.8/site-packages/flask/cli.py", line 426, in decorator
return __ctx.invoke(f, *args, **kwargs)
File "/usr/local/lib/python3.8/site-packages/click/core.py", line 610, in invoke
return callback(*args, **kwargs)
File "/app/superset/cli.py", line 413, in export_dashboards
data = dashboard_import_export.export_dashboards(db.session)
File "/app/superset/utils/dashboard_import_export.py", line 33, in export_dashboards
data = Dashboard.export_dashboards(dashboard_ids)
File "/app/superset/models/dashboard.py", line 385, in export_dashboards
json_metadata = json.loads(dashboard.json_metadata)
File "/usr/local/lib/python3.8/json/__init__.py", line 341, in loads
raise TypeError(f'the JSON object must be str, bytes or bytearray, '
TypeError: the JSON object must be str, bytes or bytearray, not NoneType
Kindly help me debug this; I have also changed "VERSIONED_EXPORT" to True.
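The traceback ends in json.loads(dashboard.json_metadata) with a None value, so at least one dashboard row appears to have no json_metadata. A hedged way to confirm this against Superset's metadata database (assuming the default table name dashboards) is:
SELECT id, dashboard_title
FROM dashboards
WHERE json_metadata IS NULL;
If such rows exist, editing and re-saving those dashboards in the UI (or, as a last resort and only after backing up the metadata database, backfilling an empty JSON object for them) may let the export proceed.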

ERROR while installing IPEX using a Docker CentOS image

I am unable to install IPEX (Intel Extension for PyTorch) using a Docker CentOS image.
I pulled this Docker image: docker pull sysstacks/dlrs-pytorch-centos
I ran it on my Linux machine with this command: docker run -it sysstacks/dlrs-pytorch-centos bash
While trying to install IPEX inside the container (image name: sysstacks/dlrs-pytorch-centos), after setting the environment variables, I unfortunately got this error:
[root@0d96884d3a05 /]# python -m pip install torch_ipex==1.9.0 -f https://software.intel.com/ipex-whl-stable
Looking in links: https://software.intel.com/ipex-whl-stable
ERROR: Exception:
Traceback (most recent call last):
File "/usr/local/lib/python3.6/site-packages/pip/_internal/cli/base_command.py", line 180, in _main
status = self.run(options, args)
File "/usr/local/lib/python3.6/site-packages/pip/_internal/cli/req_command.py", line 204, in wrapper
return func(self, options, args)
File "/usr/local/lib/python3.6/site-packages/pip/_internal/commands/install.py", line 319, in run
reqs, check_supported_wheels=not options.target_dir
File "/usr/local/lib/python3.6/site-packages/pip/_internal/resolution/resolvelib/resolver.py", line 128, in resolve
requirements, max_rounds=try_to_avoid_resolution_too_deep
File "/usr/local/lib/python3.6/site-packages/pip/_vendor/resolvelib/resolvers.py", line 473, in resolve
state = resolution.resolve(requirements, max_rounds=max_rounds)
File "/usr/local/lib/python3.6/site-packages/pip/_vendor/resolvelib/resolvers.py", line 341, in resolve
name, crit = self._merge_into_criterion(r, parent=None)
File "/usr/local/lib/python3.6/site-packages/pip/_vendor/resolvelib/resolvers.py", line 172, in _merge_into_criterion
if not criterion.candidates:
File "/usr/local/lib/python3.6/site-packages/pip/_vendor/resolvelib/structs.py", line 139, in __bool__
return bool(self._sequence)
File "/usr/local/lib/python3.6/site-packages/pip/_internal/resolution/resolvelib/found_candidates.py", line 143, in __bool__
return any(self)
File "/usr/local/lib/python3.6/site-packages/pip/_internal/resolution/resolvelib/found_candidates.py", line 129, in <genexpr>
return (c for c in iterator if id(c) not in self._incompatible_ids)
File "/usr/local/lib/python3.6/site-packages/pip/_internal/resolution/resolvelib/found_candidates.py", line 30, in _iter_built
for version, func in infos:
File "/usr/local/lib/python3.6/site-packages/pip/_internal/resolution/resolvelib/factory.py", line 272, in iter_index_candidate_infos
hashes=hashes,
File "/usr/local/lib/python3.6/site-packages/pip/_internal/index/package_finder.py", line 879, in find_best_candidate
candidates = self.find_all_candidates(project_name)
File "/usr/local/lib/python3.6/site-packages/pip/_internal/index/package_finder.py", line 824, in find_all_candidates
page_candidates = list(page_candidates_it)
File "/usr/local/lib/python3.6/site-packages/pip/_internal/index/sources.py", line 134, in page_candidates
yield from self._candidates_from_page(self._link)
File "/usr/local/lib/python3.6/site-packages/pip/_internal/index/package_finder.py", line 783, in process_project_url
html_page = self._link_collector.fetch_page(project_url)
File "/usr/local/lib/python3.6/site-packages/pip/_internal/index/collector.py", line 512, in fetch_page
return _get_html_page(location, session=self.session)
File "/usr/local/lib/python3.6/site-packages/pip/_internal/index/collector.py", line 422, in _get_html_page
resp = _get_html_response(url, session=session)
File "/usr/local/lib/python3.6/site-packages/pip/_internal/index/collector.py", line 137, in _get_html_response
"Cache-Control": "max-age=0",
File "/usr/local/lib/python3.6/site-packages/pip/_vendor/requests/sessions.py", line 555, in get
return self.request('GET', url, **kwargs)
File "/usr/local/lib/python3.6/site-packages/pip/_internal/network/session.py", line 449, in request
return super().request(method, url, *args, **kwargs)
File "/usr/local/lib/python3.6/site-packages/pip/_vendor/requests/sessions.py", line 542, in request
resp = self.send(prep, **send_kwargs)
File "/usr/local/lib/python3.6/site-packages/pip/_vendor/requests/sessions.py", line 655, in send
r = adapter.send(request, **kwargs)
File "/usr/local/lib/python3.6/site-packages/pip/_vendor/cachecontrol/adapter.py", line 53, in send
resp = super(CacheControlAdapter, self).send(request, **kw)
File "/usr/local/lib/python3.6/site-packages/pip/_vendor/requests/adapters.py", line 449, in send
timeout=timeout
File "/usr/local/lib/python3.6/site-packages/pip/_vendor/urllib3/connectionpool.py", line 696, in urlopen
self._prepare_proxy(conn)
File "/usr/local/lib/python3.6/site-packages/pip/_vendor/urllib3/connectionpool.py", line 964, in _prepare_proxy
conn.connect()
File "/usr/local/lib/python3.6/site-packages/pip/_vendor/urllib3/connection.py", line 359, in connect
conn = self._connect_tls_proxy(hostname, conn)
File "/usr/local/lib/python3.6/site-packages/pip/_vendor/urllib3/connection.py", line 506, in _connect_tls_proxy
ssl_context=ssl_context,
File "/usr/local/lib/python3.6/site-packages/pip/_vendor/urllib3/util/ssl_.py", line 432, in ssl_wrap_socket
ssl_sock = _ssl_wrap_socket_impl(sock, context, tls_in_tls)
File "/usr/local/lib/python3.6/site-packages/pip/_vendor/urllib3/util/ssl_.py", line 474, in _ssl_wrap_socket_impl
return ssl_context.wrap_socket(sock)
File "/usr/lib64/python3.6/ssl.py", line 365, in wrap_socket
_context=self, _session=session)
File "/usr/lib64/python3.6/ssl.py", line 732, in __init__
raise ValueError("check_hostname requires server_hostname")
NOTE:
I have tried all the methods shown in the question below, but none of them worked:
Why does requests raise the exception "check_hostname requires server_hostname"?
I think the issue is with your proxy. Please try the commands below in your Docker container:
export HTTP_PROXY=http://x.x.x.x
export https_proxy=http://x.x.x.x
export HTTPS_PROXY=http://x.x.x.x
export http_proxy=http://x.x.x.x
Replace x.x.x.x with your proxy's IP address (or hostname), including the port if one is required.
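Those export lines only affect the current shell session inside the container. If the proxy does turn out to be the problem, you can also pass the same settings when the container is started, for example (the proxy address and port below are placeholders):
docker run -it \
  -e http_proxy=http://x.x.x.x:port \
  -e https_proxy=http://x.x.x.x:port \
  sysstacks/dlrs-pytorch-centos bash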

Use Google default credentials in a local docker run

I have the same problem described in this question, but the provided solution does not work for me.
Basically, I want to run my Docker image, whose entrypoint is run_query.py, locally. I have credential issues when I try to run a BigQuery job.
When I try to run
docker run -v ~/.config/:/root/.config my-image-name --param1 ...
I get this error:
Traceback (most recent call last):
File "run_query.py", line 97, in <module>
query_params=params)
File "run_query.py", line 54, in create_table
query_job = client.query(query, job_config=job_config)
File "/usr/local/lib/python3.7/dist-packages/google/cloud/bigquery/client.py", line 2467, in query
query_job._begin(retry=retry, timeout=timeout)
File "/usr/local/lib/python3.7/dist-packages/google/cloud/bigquery/job.py", line 3156, in _begin
super(QueryJob, self)._begin(client=client, retry=retry, timeout=timeout)
File "/usr/local/lib/python3.7/dist-packages/google/cloud/bigquery/job.py", line 638, in _begin
retry, method="POST", path=path, data=self.to_api_repr(), timeout=timeout
File "/usr/local/lib/python3.7/dist-packages/google/cloud/bigquery/client.py", line 558, in _call_api
return call()
File "/usr/local/lib/python3.7/dist-packages/google/api_core/retry.py", line 286, in retry_wrapped_func
on_error=on_error,
File "/usr/local/lib/python3.7/dist-packages/google/api_core/retry.py", line 184, in retry_target
return target()
File "/usr/local/lib/python3.7/dist-packages/google/cloud/_http.py", line 419, in api_request
timeout=timeout,
File "/usr/local/lib/python3.7/dist-packages/google/cloud/_http.py", line 277, in _make_request
method, url, headers, data, target_object, timeout=timeout
File "/usr/local/lib/python3.7/dist-packages/google/cloud/_http.py", line 315, in _do_request
url=url, method=method, headers=headers, data=data, timeout=timeout
File "/usr/local/lib/python3.7/dist-packages/google/auth/transport/requests.py", line 444, in request
self.credentials.before_request(auth_request, method, url, request_headers)
File "/usr/local/lib/python3.7/dist-packages/google/auth/credentials.py", line 133, in before_request
self.refresh(request)
File "/usr/local/lib/python3.7/dist-packages/google/oauth2/credentials.py", line 198, in refresh
self._scopes,
File "/usr/local/lib/python3.7/dist-packages/google/oauth2/_client.py", line 248, in refresh_grant
response_data = _token_endpoint_request(request, token_uri, body)
File "/usr/local/lib/python3.7/dist-packages/google/oauth2/_client.py", line 124, in _token_endpoint_request
_handle_error_response(response_body)
File "/usr/local/lib/python3.7/dist-packages/google/oauth2/_client.py", line 60, in _handle_error_response
raise exceptions.RefreshError(error_details, response_body)
I also tried to use -v ~/.config/gcloud/:/root/.config/gcloud, but I get the same result.
Keep in mind that using this image in a Kubeflow Pipeline works smoothly.
Did I misinterpret the solution from the previous question? What am I missing?
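Not from the original question, but a common workaround when mounted user credentials fail to refresh inside a container is to mount a service-account key and point GOOGLE_APPLICATION_CREDENTIALS at it, so the BigQuery client picks up application default credentials from the key file; the key path below is only a placeholder:
docker run \
  -v "$HOME/keys/bq-sa-key.json:/tmp/key.json:ro" \
  -e GOOGLE_APPLICATION_CREDENTIALS=/tmp/key.json \
  my-image-name --param1 ...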

docker-compose command gives an error while building

I am pretty much an amateur with Docker. I had been running my application on Docker successfully for many days.
Now when I run the command docker-compose up -d, it gives me the error below:
Traceback (most recent call last):
File "docker-compose", line 3, in <module>
File "compose/cli/main.py", line 88, in main
File "compose/cli/main.py", line 140, in perform_command
File "compose/cli/main.py", line 900, in up
File "compose/project.py", line 385, in up
File "compose/project.py", line 590, in warn_for_swarm_mode
File "site-packages/docker/api/daemon.py", line 73, in info
File "site-packages/docker/utils/decorators.py", line 47, in inner
File "site-packages/docker/api/client.py", line 179, in _get
File "site-packages/requests/sessions.py", line 488, in get
File "site-packages/requests/sessions.py", line 466, in request
File "site-packages/requests/sessions.py", line 641, in
merge_environment_settings
File "site-packages/requests/utils.py", line 605, in get_environ_proxies
File "site-packages/requests/utils.py", line 589, in should_bypass_proxies
File "urllib.py", line 1510, in proxy_bypass
File "urllib.py", line 1484, in proxy_bypass_macosx_sysconf
ValueError: negative shift count
Failed to execute script docker-compose
I have searched for this error (ValueError: negative shift count) a lot, but found nothing useful.
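The last frames of the traceback are in urllib's proxy_bypass_macosx_sysconf, so the crash happens while docker-compose parses the macOS proxy configuration, most likely because of a malformed entry in the proxy bypass list. One way to inspect that configuration (not from the original post) is:
scutil --proxy    # prints the macOS proxy settings, including the ExceptionsList used for bypassing
If an entry in the bypass list looks malformed (for example an IP range written with an invalid mask), fixing or removing it under System Preferences → Network → Advanced → Proxies and rerunning docker-compose up -d is worth trying.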
