GObject signals and GLib MainLoop - glib

I have a GObject-derived object that emits signals in a worker thread, and I want to handle them in the main thread, which runs GLib's MainLoop. Here is sample code that uses PyGObject:
import threading

import gi
from gi.repository import GObject, GLib

class SomeObj(GObject.Object, threading.Thread):
    def __init__(self):
        GObject.Object.__init__(self)
        threading.Thread.__init__(self)

    def run(self):
        ...
        self.emit('sig')
        ...

    @GObject.Signal
    def sig(self):
        pass

def callback(instance):
    # will be called in obj's thread
    ...

loop = GLib.MainLoop()
obj = SomeObj()
obj.connect('sig', callback)
obj.start()
loop.run()
callback() will be called in obj's thread. How can I handle the signal in the main thread, inside loop.run()?

Push an event to the main context of the main thread from your callback signal handler:
def callback(instance):
    # None here means the global default GMainContext, which is running in your main thread
    GLib.MainContext.invoke(None, callback_main, instance)

def callback_main(instance):
    # Double check that we’re running in the main thread:
    assert(GLib.MainContext.is_owner(None))
    # … the code you want to be executed in the main thread …
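A commonly used alternative in PyGObject (not part of the answer above, just a sketch using the standard GLib.idle_add helper) is to attach an idle callback to the default main context, which the main thread's loop.run() is iterating:

from gi.repository import GLib

def callback(instance):
    # Schedules callback_main on the default main context; it will be
    # dispatched by loop.run() in the main thread.
    GLib.idle_add(callback_main, instance)

def callback_main(instance):
    # ... the code you want executed in the main thread ...
    return GLib.SOURCE_REMOVE  # run once, do not reschedule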

Related

Use PySide6 in thread

Qt has a promising SCXML module. Since PySCXML is obsolete, there is no other native Python SCXML library that lets me run an SCXML state machine, which is why I'm trying PySide6.
Since I don't need anything from Qt apart from the SCXML library, I thought about running the QCoreApplication in a separate thread, in order to have the event loop there.
According to the documentation, QScxmlStateMachine needs one.
Unfortunately my start_statemachine() method doesn't return, even though the state machine starts working.
Any advice on how to start a QScxmlStateMachine in a thread is welcome.
from PySide6.QtCore import QCoreApplication, QObject
from PySide6.QtScxml import QScxmlStateMachine
from PySide6.QtCore import QTimer
import threading

def start_statemachine(filename):
    app = QCoreApplication()
    mysm = MyStateMachine(filename)
    mysm.start_sm()
    app.exec()

class MyStateMachine(QObject):
    def __init__(self, filename):
        super(MyStateMachine, self).__init__()
        self.sm = QScxmlStateMachine.fromFile(filename)
        self.counter = 0
        self.timer = QTimer()
        self.timer.setInterval(2000)
        self.timer.timeout.connect(self.recurring_timer)
        self.timer.start()

    def start_sm(self):
        print('starting statemachine')
        self.sm.setRunning(True)

    def recurring_timer(self):
        print(self.sm.activeStateNames())
        self.counter += 1
        print("Counter: %d" % self.counter)
        print('statemachine running status: ' + str(self.sm.isRunning()))

if __name__ == '__main__':
    x = threading.Thread(target=start_statemachine('statemachine.scxml'))
    x.start()  # won't be reached
    while True:
        pass  # do something else
    x.join()
The thread target needs to be a reference to a function that will be called in the external thread, but you're not running start_statemachine() in another thread: you're actually executing it in place:
x = threading.Thread(target=start_statemachine('statemachine.scxml'))
                            ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Your program is stuck there, no thread is even created because the constructor is still "waiting" for start_statemachine() to return, and since exec() is blocking, nothing else happens.
A basic solution could be to use a lambda:
x = threading.Thread(target=lambda: start_statemachine('statemachine.scxml'))
But you'll need access to the application in order to be able to quit it: x.join() won't do anything, because the QCoreApplication event loop will keep going, so a possibility is to create a basic class that provides a reference to the application:
class StateMachineWrapper:
    app = None

    def __init__(self, filename):
        self.filename = filename

    def start(self):
        self.app = QCoreApplication([])
        mysm = MyStateMachine(self.filename)
        mysm.start_sm()
        self.app.exec()

# ...

if __name__ == '__main__':
    statemachine = StateMachineWrapper('statemachine.scxml')
    x = threading.Thread(target=statemachine.start)
    x.start()

    while True:
        pass  # do something else

    if statemachine.app:
        statemachine.app.quit()
    x.join()
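Note that the "while True: pass" placeholder busy-waits and never reaches the quit() call. A minimal sketch of one way to hand control back (the fixed 10-second run time and the 0.1-second sleep are illustrative assumptions, not part of the original answer):

import time

if __name__ == '__main__':
    statemachine = StateMachineWrapper('statemachine.scxml')
    x = threading.Thread(target=statemachine.start)
    x.start()

    deadline = time.monotonic() + 10   # illustrative: run for 10 seconds
    while time.monotonic() < deadline:
        time.sleep(0.1)                # do something else; sleeping avoids a busy loop

    if statemachine.app:
        statemachine.app.quit()        # ask the Qt event loop in the thread to exit
    x.join()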

Is it possible to suspend and restart tasks in async Python?

The question should be simple enough, but I couldn't find anything about it.
I have an async Python program that contains a rather long-running task that I want to be able to suspend and restart at arbitrary points (arbitrary of course meaning anywhere there's an await keyword).
I was hoping there was something along the lines of task.suspend() and task.resume() but it seems there isn't.
Is there an API for this on task- or event-loop-level or would I need to do this myself somehow? I don't want to place an event.wait() before every await...
What you're asking for is possible, but not trivial. First, note that you can never have suspends on every await, but only on those that result in suspension of the coroutine, such as asyncio.sleep(), or a stream.read() that doesn't have data ready to return. Awaiting a coroutine immediately starts executing it, and if the coroutine can return immediately, it does so without dropping to the event loop. await only suspends to the event loop if the awaitee (or its awaitee, etc.) requests it. More details in these questions: [1], [2], [3], [4].
With that in mind, you can use the technique from this answer to intercept each resumption of the coroutine with additional code that checks whether the task is paused and, if so, waits for the resume event before proceeding.
import asyncio

class Suspendable:
    def __init__(self, target):
        self._target = target
        self._can_run = asyncio.Event()
        self._can_run.set()
        self._task = asyncio.ensure_future(self)

    def __await__(self):
        target_iter = self._target.__await__()
        iter_send, iter_throw = target_iter.send, target_iter.throw
        send, message = iter_send, None
        # This "while" emulates yield from.
        while True:
            # wait for can_run before resuming execution of self._target
            try:
                while not self._can_run.is_set():
                    yield from self._can_run.wait().__await__()
            except BaseException as err:
                send, message = iter_throw, err
            # continue with our regular program
            try:
                signal = send(message)
            except StopIteration as err:
                return err.value
            else:
                send = iter_send
            try:
                message = yield signal
            except BaseException as err:
                send, message = iter_throw, err

    def suspend(self):
        self._can_run.clear()

    def is_suspended(self):
        return not self._can_run.is_set()

    def resume(self):
        self._can_run.set()

    def get_task(self):
        return self._task
Test:
import time

async def heartbeat():
    while True:
        print(time.time())
        await asyncio.sleep(.2)

async def main():
    task = Suspendable(heartbeat())
    for i in range(5):
        print('suspending')
        task.suspend()
        await asyncio.sleep(1)
        print('resuming')
        task.resume()
        await asyncio.sleep(1)

asyncio.run(main())
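If you control the long-running coroutine yourself, a simpler pattern (a sketch, not part of the answer above) is to have it await an asyncio.Event at its own checkpoints; the trade-off is that it can only pause at those checkpoints rather than at every suspension point:

import asyncio
import time

class PausableHeartbeat:
    def __init__(self):
        self._can_run = asyncio.Event()
        self._can_run.set()

    def suspend(self):
        self._can_run.clear()

    def resume(self):
        self._can_run.set()

    async def run(self):
        while True:
            await self._can_run.wait()   # parks here while suspended
            print(time.time())
            await asyncio.sleep(.2)

async def main():
    hb = PausableHeartbeat()
    task = asyncio.create_task(hb.run())
    await asyncio.sleep(1)
    hb.suspend()
    await asyncio.sleep(1)
    hb.resume()
    await asyncio.sleep(1)
    task.cancel()
    try:
        await task
    except asyncio.CancelledError:
        pass

asyncio.run(main())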

Clock Schedule Once Error in class functions/methods

I have a class with two functions in a Kivy app. One function calls the second with Clock.schedule_once(function_name).
import sys

from kivy.app import App
from kivy.clock import Clock
from kivy.uix.screenmanager import Screen
from cefpython3 import cefpython as cef

class SivaCEFBrowser(Screen):
    def back_to_login(self):
        App.get_running_app().root.current = 'login_screen'
        App.get_running_app().root.transition.direction = 'right'

    def go_to_verify(self):
        App.get_running_app().root.current = 'verify_screen'
        App.get_running_app().root.transition.direction = 'left'

    def launch_cef_browser(self):
        sys.excepthook = cef.ExceptHook  # To shutdown all CEF processes on error.
        cef.Initialize()
        cef.CreateBrowserSync(url="https://www.google.com/", window_title="Hello World!")
        cef.MessageLoop()
        cef.Shutdown()

    def trigger_browser(self):
        Clock.schedule_once(self.launch_cef_browser)
When my code runs, the trigger_browser() function is called which, in turn, invokes launch_cef_browser(). This is the error that I get:
What am I missing here?
You need to define the launch_cef_browser method as def launch_cef_browser(self, *args).
Kivy's Clock passes the elapsed time (dt) as an extra positional argument when it invokes the scheduled callback, so the method has to accept it.
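Applied to the code in the question, only the signature needs to change (the body stays the same as above):

def launch_cef_browser(self, *args):
    # Clock.schedule_once calls this as launch_cef_browser(dt), so the
    # method must accept the extra positional argument.
    sys.excepthook = cef.ExceptHook  # To shutdown all CEF processes on error.
    cef.Initialize()
    cef.CreateBrowserSync(url="https://www.google.com/", window_title="Hello World!")
    cef.MessageLoop()
    cef.Shutdown()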

Asyncio start a task inside another task?

I'm trying to learn how to use asyncio but I've hit a roadblock.
What am I trying to do? I want to create a number of workers that each start their own task as soon as they're created, so while task 3 is being created and started, task 1 should already be executing its task. I'm doing that with a loop inside a single coroutine: at each iteration a worker is created and started.
The problem I'm facing: when the first worker completes its task, the others just stop and don't continue.
This is my code:
import asyncio

class Worker:
    def __init__(self, session_name):
        self.name = session_name
        self.messagelist = ['--------1', '--------2', '--------3', '--------4']

    async def job(self):
        for i, message in enumerate(self.messagelist):
            print(f"### Worker {self.name} says {message}")
            await asyncio.sleep(20)

class Testmanager:
    def __init__(self):
        self.workers_name = ['test0', 'test1', 'test2', 'test3', 'test4']

    async def create_and_start_workers(self, loop):
        for i, name in enumerate(self.workers_name):
            worker = Worker(name)
            print(f"# Created worker {worker.name}")
            loop.create_task(worker.job())
            print(f"## Started worker {worker.name}")
            await asyncio.sleep(10)

    def start(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self.create_and_start_workers(loop))
        loop.close()

manager = Testmanager()
manager.start()
When run initially it works as expected, but after a while I get a lot of:
Task was destroyed but it is pending!
task: <Task pending coro=<Worker.job() done, defined at PATH_REDACTED> wait_for=<Future pending cb=[<TaskWakeupMethWrapper object at 0x0000026AF6315438>()]>>
What am I doing wrong?
Thanks for the help.
What am I doing wrong?
You are never awaiting the tasks you create to run in parallel. For example:
async def create_and_start_workers(self, loop):
    tasks = []
    for i, name in enumerate(self.workers_name):
        worker = Worker(name)
        print(f"# Created worker {worker.name}")
        tasks.append(loop.create_task(worker.job()))
        print(f"## Started worker {worker.name}")
        await asyncio.sleep(10)
    await asyncio.gather(*tasks)
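On Python 3.7 and later the same fix is usually written with asyncio.run and asyncio.create_task, which avoids passing the loop around. A sketch of how Testmanager could be adapted (not part of the original answer):

class Testmanager:
    def __init__(self):
        self.workers_name = ['test0', 'test1', 'test2', 'test3', 'test4']

    async def create_and_start_workers(self):
        tasks = []
        for name in self.workers_name:
            worker = Worker(name)
            # create_task schedules the job on the already-running loop
            tasks.append(asyncio.create_task(worker.job()))
            await asyncio.sleep(10)
        # keep this coroutine alive until every worker has finished
        await asyncio.gather(*tasks)

    def start(self):
        asyncio.run(self.create_and_start_workers())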

Kivy - threads, queues, clocks and Python sockets

I'm brand new to Kivy, and also new to GUI, but not new to programming.
I am completely missing the boat, the canoe, and the airplane on using Kivy.
In 30 years of programming, from machine code, assembly, Fortran, C, C++, Java, Python, I've never tried to use a language such as Kivy whose documentation is this thin, because it's so new. I know it'll get better, but I'm trying to use it now.
In my code, I'm trying to implement Queueing, so that I can obtain Python socket data. In normal Python programming, I would have IPC via a Queue - put data in, get data out.
I understand from Kivy, mostly from what I've read in various forums (I can't say I've found it in the documentation at kivy.org), that the following applies:
- Kivy needs to be in its own thread.
- Nothing in Kivy should sleep.
- Nothing in Kivy should do blocking IO.
After a LOT of Google searching, the only thing I've actually found that approaches being useful is an informative note here on Stack Overflow. However, while it almost solves my problem, the answer assumes I know more about Kivy than I do; I don't know how to incorporate the answer.
If someone could take the time to put together a COMPLETE short demo of using that example, or one of your own unique COMPLETE answers, I would much appreciate it!
Here's some short code I put together, but it doesn't work, because it blocks on the get() call.
from Queue import Queue
from kivy.lang import Builder
from kivy.app import App
from kivy.uix.boxlayout import BoxLayout
from kivy.properties import StringProperty
from kivy.clock import Clock
from threading import Thread

class ClockedQueue(BoxLayout):
    text1 = StringProperty("Start")

    def __init__(self):
        super(ClockedQueue, self).__init__()
        self.q = Queue()
        self.i = 0
        Clock.schedule_interval(self.get, 2)

    def get(self, dt):
        print("get entry")
        val = self.q.get()
        print(self.i + val)
        self.i += 1

class ClockedQueueApp(App):
    def build(self):
        return ClockedQueue()

class SourceQueue(Queue):
    def __init__(self):
        q = Queue()
        for word in ['First', 'Second']:
            q.put(word)
        print("SourceQueue finished.")

def main():
    th = Thread(target=SourceQueue)
    th.start()
    ClockedQueueApp().run()
    return 0

if __name__ == '__main__':
    main()
Thanks!
Here's some short code I put together, but it doesn't work, because it blocks on the get() call.
So what you really want to do is get items from your queue in a non-blocking way?
There are multiple ways to do this. The simplest seems to be to just check if the queue has any items before getting one - Queue has several methods that help with this, including checking if it is empty or setting whether get is allowed to be blocking (by setting its first argument to False). If you just do this instead of calling get on its own, you won't block things waiting for the queue to have any items - if it's empty or you can't immediately get anything, you just do nothing.
I don't know what you want to do with the items you get from the queue, but if it's short operations that don't take long then you won't need anything more than this. For instance, you could Clock.schedule_interval the get method to happen every frame, do nothing if the queue is empty, or operate on the data if you get something back. No blocking, and no messing with your own threads.
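A minimal sketch of that schedule-and-poll idea, reusing the question's ClockedQueue (the zero-second interval, meaning once per frame, and the non-blocking get(False) call are illustrative choices):

from queue import Queue, Empty   # "from Queue import Queue, Empty" on Python 2
from kivy.uix.boxlayout import BoxLayout
from kivy.clock import Clock

class ClockedQueue(BoxLayout):
    def __init__(self, **kwargs):
        super(ClockedQueue, self).__init__(**kwargs)
        self.q = Queue()
        # Poll the queue once per frame; the callback never blocks.
        Clock.schedule_interval(self.get, 0)

    def get(self, dt):
        try:
            val = self.q.get(False)   # non-blocking: raises Empty if nothing is queued
        except Empty:
            return                    # nothing yet; try again next frame
        print(val)                    # short, non-blocking work on the item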
You can also create your own thread and run the blocking code in it, which is the general way to deal with blocking issues, especially for tasks that can't be split up into short sections that can be performed between frames. I don't know about the details of this, but it should just involve using Python threads normally. You can check the source of Kivy's UrlRequest for an example; it downloads a web resource in a background thread.
Edit: Also your SourceQueue is messed up (you override its __init__ to make a new queue that you don't store anywhere), and your clock scheduling has a meaningless third argument false which isn't even defined. I don't know what's going on here, it probably affects what you're trying to do, but doesn't matter to my general answer above.
I was finally able to create something that worked.
Thanks everyone for your suggestions!
Here's the code (because I'm new, Stack Overflow wouldn't let me post it as an answer to my own question until 5:00 AM):
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# threads_and_kivy.py
#
'''threads_and_kivy.py

Trying to build up a foundation that satisfies the following:
- has a thread that will implement code that:
  - simulates reading data from a Python socket
  - works on the data
  - puts the data onto a Python Queue
- has a Kivy main thread that:
  - via class ShowGUI
  - reads data from the Queue
  - updates a class variable of type StringProperty so it will
    update the label_text property.
'''
from threading import Thread
from Queue import Queue, Empty
import time

from kivy.app import App
from kivy.uix.boxlayout import BoxLayout
from kivy.properties import StringProperty
from kivy.lang import Builder
from kivy.clock import Clock

kv = '''
<ShowGUI>:
    Label:
        text: str(root.label_text)
'''
Builder.load_string(kv)

q = Queue()

class SimSocket():
    global q

    def __init__(self, queue):
        self.q = queue

    def put_on_queue(self):
        print("<-----..threaded..SimSocket.put_on_queue(): entry")
        for i in range(10):
            print(".....threaded.....SimSocket.put_on_queue(): Loop " + str(i))
            time.sleep(1)  # just here to sim occasional data send
            self.some_data = ["SimSocket.put_on_queue(): Data Loop " + str(i)]
            self.q.put(self.some_data)
        print("..threaded..SimSocket.put_on_queue(): thread ends")

class ShowGUI(BoxLayout):
    label_text = StringProperty("Initial - not data")
    global q

    def __init__(self):
        super(ShowGUI, self).__init__()
        print("ShowGUI.__init__() entry")
        Clock.schedule_interval(self.get_from_queue, 1.0)

    def get_from_queue(self, dt):
        print("---------> ShowGUI.get_from_queue() entry")
        try:
            queue_data = q.get(timeout=5)
            self.label_text = queue_data[0]
            for qd in queue_data:
                print("SimKivy.get_from_queue(): got data from queue: " + qd)
        except Empty:
            print("Error - no data received on queue.")
            print("Unschedule Clock's schedule")
            Clock.unschedule(self.get_from_queue)

class KivyGui(App):
    def build(self):
        return ShowGUI()

def main():
    global q
    ss = SimSocket(q)
    simSocket_thread = Thread(name="simSocket", target=ss.put_on_queue)
    simSocket_thread.start()
    print("Starting KivyGui().run()")
    KivyGui().run()
    return 0

if __name__ == '__main__':
    main()
