# One-time multiprocessing configuration: choose a start method suited to the
# current platform. Guarded by a module flag so re-imports are no-ops.
_mp_fork_set = False
if not _mp_fork_set:
    try:
        # Windows has no fork(); "spawn" is the only supported start method there.
        if platform == "win32":
            set_start_method("spawn")
        else:
            set_start_method("fork")
        _mp_fork_set = True
    except Exception as e:
        # set_start_method raises if a context was already set; log and continue.
        logger.info(f"error when setting multiprocessing.set_start_method - maybe the context is set {e.args}")
    # NOTE(review): disabling proxies on macOS — presumably to avoid proxy
    # lookup issues in forked child processes; confirm original intent.
    if platform == "darwin":
        os.environ["no_proxy"] = "*"
3535
def register_decorated_fn(name: str, poll_interval: int, domain: str, worker_id: str, func):
    """Register a decorated worker function in the module-level registry.

    Stores `func` plus its polling metadata in `_decorated_functions`, keyed
    by the (task name, domain) pair, so a TaskHandler can discover it later.

    :param name: task definition name the function handles
    :param poll_interval: polling interval for the worker
    :param domain: task domain (part of the registry key)
    :param worker_id: identifier of the worker
    :param func: the callable to register
    """
    logger.info(f"decorated {name}")
    _decorated_functions[(name, domain)] = {
        "func": func,
        "poll_interval": poll_interval,
        "domain": domain,
        "worker_id": worker_id,
    }
4444
4545
@@ -56,11 +56,11 @@ def __init__(
5656 self .logger_process , self .queue = _setup_logging_queue (configuration )
5757
5858 # imports
59- importlib .import_module (' conductor.client.http.models.task' )
60- importlib .import_module (' conductor.client.worker.worker_task' )
59+ importlib .import_module (" conductor.client.http.models.task" )
60+ importlib .import_module (" conductor.client.worker.worker_task" )
6161 if import_modules is not None :
6262 for module in import_modules :
63- logger .info (f' loading module { module } ' )
63+ logger .info (f" loading module { module } " )
6464 importlib .import_module (module )
6565
6666 elif not isinstance (workers , list ):
@@ -77,12 +77,12 @@ def __init__(
7777 worker_id = worker_id ,
7878 domain = domain ,
7979 poll_interval = poll_interval )
80- logger .info (f' created worker with name={ task_def_name } and domain={ domain } ' )
80+ logger .info (f" created worker with name={ task_def_name } and domain={ domain } " )
8181 workers .append (worker )
8282
8383 self .__create_task_runner_processes (workers , configuration , metrics_settings )
8484 self .__create_metrics_provider_process (metrics_settings )
85- logger .info (' TaskHandler initialized' )
85+ logger .info (" TaskHandler initialized" )
8686
8787 def __enter__ (self ):
8888 return self
@@ -93,24 +93,24 @@ def __exit__(self, exc_type, exc_value, traceback):
9393 def stop_processes (self ) -> None :
9494 self .__stop_task_runner_processes ()
9595 self .__stop_metrics_provider_process ()
96- logger .info (' Stopped worker processes...' )
96+ logger .info (" Stopped worker processes..." )
9797 self .queue .put (None )
9898 self .logger_process .terminate ()
9999
100100 def start_processes (self ) -> None :
101- logger .info (' Starting worker processes...' )
101+ logger .info (" Starting worker processes..." )
102102 freeze_support ()
103103 self .__start_task_runner_processes ()
104104 self .__start_metrics_provider_process ()
105- logger .info (' Started all processes' )
105+ logger .info (" Started all processes" )
106106
107107 def join_processes (self ) -> None :
108108 try :
109109 self .__join_task_runner_processes ()
110110 self .__join_metrics_provider_process ()
111- logger .info (' Joined all processes' )
111+ logger .info (" Joined all processes" )
112112 except KeyboardInterrupt :
113- logger .info (' KeyboardInterrupt: Stopping all processes' )
113+ logger .info (" KeyboardInterrupt: Stopping all processes" )
114114 self .stop_processes ()
115115
116116 def __create_metrics_provider_process (self , metrics_settings : MetricsSettings ) -> None :
@@ -121,7 +121,7 @@ def __create_metrics_provider_process(self, metrics_settings: MetricsSettings) -
121121 target = MetricsCollector .provide_metrics ,
122122 args = (metrics_settings ,)
123123 )
124- logger .info (' Created MetricsProvider process' )
124+ logger .info (" Created MetricsProvider process" )
125125
126126 def __create_task_runner_processes (
127127 self ,
@@ -149,25 +149,25 @@ def __start_metrics_provider_process(self):
149149 if self .metrics_provider_process is None :
150150 return
151151 self .metrics_provider_process .start ()
152- logger .info (' Started MetricsProvider process' )
152+ logger .info (" Started MetricsProvider process" )
153153
154154 def __start_task_runner_processes (self ):
155155 n = 0
156156 for task_runner_process in self .task_runner_processes :
157157 task_runner_process .start ()
158158 n = n + 1
159- logger .info (f' Started { n } TaskRunner process' )
159+ logger .info (f" Started { n } TaskRunner process" )
160160
161161 def __join_metrics_provider_process (self ):
162162 if self .metrics_provider_process is None :
163163 return
164164 self .metrics_provider_process .join ()
165- logger .info (' Joined MetricsProvider processes' )
165+ logger .info (" Joined MetricsProvider processes" )
166166
167167 def __join_task_runner_processes (self ):
168168 for task_runner_process in self .task_runner_processes :
169169 task_runner_process .join ()
170- logger .info (' Joined TaskRunner processes' )
170+ logger .info (" Joined TaskRunner processes" )
171171
172172 def __stop_metrics_provider_process (self ):
173173 self .__stop_process (self .metrics_provider_process )
@@ -180,12 +180,12 @@ def __stop_process(self, process: Process):
180180 if process is None :
181181 return
182182 try :
183- logger .debug (f' Terminating process: { process .pid } ' )
183+ logger .debug (f" Terminating process: { process .pid } " )
184184 process .terminate ()
185185 except Exception as e :
186- logger .debug (f' Failed to terminate process: { process .pid } , reason: { e } ' )
186+ logger .debug (f" Failed to terminate process: { process .pid } , reason: { e } " )
187187 process .kill ()
188- logger .debug (f' Killed process: { process .pid } ' )
188+ logger .debug (f" Killed process: { process .pid } " )
189189
190190
191191# Setup centralized logging queue