import time
from dataclasses import asdict, dataclass
from datetime import datetime
+from logging import INFO, basicConfig, getLogger
from typing import Dict, List

import load_config
import paramiko
import vim_cmd_parser
from pymongo import MongoClient, UpdateOne

+FORMAT = "%(asctime)s \t %(message)s"
+basicConfig(format=FORMAT, level=INFO)
+logger = getLogger(__name__)
+

class PowerStatus:
    ON: str = "on"
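As a side note, a minimal standalone sketch of what this logging setup produces once configured this way (the module name, messages, and timestamp below are illustrative only, not taken from the crawler):

from logging import INFO, basicConfig, getLogger

FORMAT = "%(asctime)s \t %(message)s"
basicConfig(format=FORMAT, level=INFO)  # default root handler writes to stderr
logger = getLogger(__name__)

# Each call emits one timestamped line, e.g.
#   2024-01-01 12:00:00,000 	 Starting crawler loop
logger.info("Starting crawler loop")

# logging also supports lazy %-style formatting, which skips building the
# message string when the level is filtered out:
logger.info("Crawl interval = %d [sec]", 60)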
@@ -104,10 +109,12 @@ def get_vms_list(
        _client: paramiko.SSHClient) -> Dict[int, MachineDetailWithOptions]:
    """Get the list of VMs"""

-    print(" Start get_vms_list")
+    logger.info("++++++ Start get_vms_list ++++++ ")
    # Get the VM info listing from the second row onward (skip the header label)
    _, stdout, stderr = _client.exec_command("vim-cmd vmsvc/getallvms")
-    print("stderr:", stderr.read())
+    stderr_ = stderr.read()
+    if len(stderr_) > 0:
+        logger.info("stderr: " + stderr_.decode("utf-8"))

    vm_info: Dict[int, MachineDetailWithOptions] = {}
    for line in stdout.readlines():
@@ -131,8 +138,8 @@ def get_vms_list(
            # print(json.dumps(result, indent=4))

        except Exception as e:
-            print("Fail to create MachineDetailSpec: dat=", dat)
-            print("Exception: ", e)
+            logger.info("Fail to create MachineDetailSpec: dat=" + dat)
+            logger.info(e)
            continue

        # Lines starting with "Vmid"
@@ -143,7 +150,7 @@ def get_vms_list(


def crawl() -> List[MachineDetailForStore]:
-    print(" Start crawling")
+    logger.info("++++++ Start crawling ++++++ ")

    """ Init ssh connecter """
    client = paramiko.SSHClient()
@@ -154,16 +161,16 @@ def crawl() -> List[MachineDetailForStore]:
    machines_info: List[MachineDetailForStore] = []
    nodes_conf = load_config.get_esxi_nodes()
    for esxi_nodename, config in nodes_conf.items():
-        print("+++ Connect to", esxi_nodename, " +++")
+        logger.info("+++ Connect to " + esxi_nodename + " +++")
        try:
            client.connect(
                config.addr,
                username=config.username,
                key_filename=config.identity_file_path,
                timeout=5.0,
            )
-        except paramiko.ssh_exception.SSHException as e:
-            print(e)
+        except Exception as e:
+            logger.info("Connect error" + str(e))
            continue

        # Merge the VM lists
@@ -180,7 +187,8 @@ def crawl() -> List[MachineDetailForStore]:
            )
            machines_info.append(vm_info)
        except Exception as e:
-            print("Fail to parse as MachineDetailForStore:", e)
+            logger.info("Fail to parse as MachineDetailForStore:")
+            logger.info(e)
            continue

    client.close()
@@ -221,9 +229,9 @@ def register(machines_info: List[MachineDetailForStore]):


def main():
-    print("Starting crawler loop")
+    logger.info("Starting crawler loop")
    crawl_interval = int(os.getenv("CRAWLER_INTERVAL", "60"))
-    print("Crawl interval =", crawl_interval, "[sec]")
+    logger.info("Crawl interval =" + str(crawl_interval) + "[sec]")

    while True:
        start_at = time.time()
@@ -232,7 +240,7 @@ def main():
        consumed = time.time() - start_at
        if crawl_interval - consumed < 0:
            consumed += crawl_interval
-        print("waiting for next crawl:", consumed, " [sec]")
+        logger.info("Waiting for next crawl: " + str(consumed) + " [sec]")
        time.sleep(crawl_interval - consumed)

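For context on the timing logic in main(): the loop measures how long one crawl took and then waits out the rest of the interval before the next pass. A hedged sketch of the same idea in isolation (run_loop and crawl_once are hypothetical stand-ins, not functions from this repository, and the max(0, ...) clamp is a common variant rather than exactly what the diff does):

import time

def crawl_once() -> None:
    # Hypothetical stand-in for the real crawl() + register() pair.
    time.sleep(1.0)

def run_loop(crawl_interval: float) -> None:
    while True:
        start_at = time.time()
        crawl_once()
        consumed = time.time() - start_at
        # Sleep only for the remainder of the interval; clamping at zero keeps
        # an over-budget crawl from passing a negative value to time.sleep().
        time.sleep(max(0.0, crawl_interval - consumed))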