Commit 6cb471b

Merge pull request #29 from cdsl-research/tomoyk/update-logging
Tomoyk/update logging on crawler
2 parents a6c7752 + 8e0aa0a commit 6cb471b

File tree

1 file changed: +20 −12 lines changed

crawler/main.py

+20-12
@@ -4,13 +4,18 @@
 import time
 from dataclasses import asdict, dataclass
 from datetime import datetime
+from logging import INFO, basicConfig, getLogger
 from typing import Dict, List
 
 import load_config
 import paramiko
 import vim_cmd_parser
 from pymongo import MongoClient, UpdateOne
 
+FORMAT = "%(asctime)s \t %(message)s"
+basicConfig(format=FORMAT, level=INFO)
+logger = getLogger(__name__)
+
 
 class PowerStatus:
     ON: str = "on"
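The new module-level setup routes every record through logging with a timestamp. A minimal standalone sketch of what the configured format produces (the date shown is illustrative):

    from logging import INFO, basicConfig, getLogger

    # Same format string as the commit: asctime, a tab-wrapped separator, the message
    FORMAT = "%(asctime)s \t %(message)s"
    basicConfig(format=FORMAT, level=INFO)
    logger = getLogger(__name__)

    logger.info("++++++ Start crawling ++++++")
    # -> 2022-01-01 12:00:00,000 <tab> ++++++ Start crawling ++++++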
@@ -104,10 +109,12 @@ def get_vms_list(
         _client: paramiko.SSHClient) -> Dict[int, MachineDetailWithOptions]:
     """Get the list of VMs"""
 
-    print("Start get_vms_list")
+    logger.info("++++++ Start get_vms_list ++++++")
     # Get the VM info list from the second line onward (excluding the label row)
     _, stdout, stderr = _client.exec_command("vim-cmd vmsvc/getallvms")
-    print("stderr:", stderr.read())
+    stderr_ = stderr.read()
+    if len(stderr_) > 0:
+        logger.info("stderr: " + stderr_.decode("utf-8"))
 
     vm_info: Dict[int, MachineDetailWithOptions] = {}
     for line in stdout.readlines():
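paramiko's exec_command returns file-like channel objects whose read() yields bytes, so the new code decodes stderr and logs it only when the command actually wrote something. A hedged sketch of the same pattern in isolation (host, username, and key path are placeholders):

    import paramiko
    from logging import getLogger

    logger = getLogger(__name__)

    client = paramiko.SSHClient()
    client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    client.connect("esxi.example.com", username="root",
                   key_filename="/path/to/id_rsa", timeout=5.0)

    _, stdout, stderr = client.exec_command("vim-cmd vmsvc/getallvms")
    stderr_ = stderr.read()  # bytes; empty when nothing was written to stderr
    if len(stderr_) > 0:
        logger.info("stderr: " + stderr_.decode("utf-8"))
    client.close()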
@@ -131,8 +138,8 @@ def get_vms_list(
             # print(json.dumps(result, indent=4))
 
         except Exception as e:
-            print("Fail to create MachineDetailSpec: dat=", dat)
-            print("Exception: ", e)
+            logger.info("Fail to create MachineDetailSpec: dat=" + dat)
+            logger.info(e)
             continue
 
         # Lines starting with Vmid
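Passing the exception object directly, as in logger.info(e), works because logging calls str() on the message. An alternative sketch, assuming the same logger, that uses logging's lazy %-style arguments so the string is only built when the record is actually emitted:

    logger.info("Fail to create MachineDetailSpec: dat=%s", dat)
    logger.info("Exception: %s", e)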
@@ -143,7 +150,7 @@ def get_vms_list(
 
 
 def crawl() -> List[MachineDetailForStore]:
-    print("Start crawling")
+    logger.info("++++++ Start crawling ++++++")
 
     """ Init ssh connecter """
     client = paramiko.SSHClient()
@@ -154,16 +161,16 @@ def crawl() -> List[MachineDetailForStore]:
     machines_info: List[MachineDetailForStore] = []
     nodes_conf = load_config.get_esxi_nodes()
     for esxi_nodename, config in nodes_conf.items():
-        print("+++ Connect to", esxi_nodename, "+++")
+        logger.info("+++ Connect to " + esxi_nodename + " +++")
         try:
             client.connect(
                 config.addr,
                 username=config.username,
                 key_filename=config.identity_file_path,
                 timeout=5.0,
             )
-        except paramiko.ssh_exception.SSHException as e:
-            print(e)
+        except Exception as e:
+            logger.info("Connect error" + str(e))
             continue
 
         # Merge the VM lists
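The handler is widened from paramiko.ssh_exception.SSHException to Exception because SSHClient.connect can also raise errors outside paramiko's hierarchy, such as socket.timeout for an unreachable host, and any of them should skip the node rather than stop the crawl. A minimal sketch of the pattern (hostname is a placeholder):

    try:
        client.connect("esxi.example.com", username="root", timeout=5.0)
    except Exception as e:  # SSHException, socket.timeout, OSError, ...
        logger.info("Connect error" + str(e))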
@@ -180,7 +187,8 @@ def crawl() -> List[MachineDetailForStore]:
             )
             machines_info.append(vm_info)
         except Exception as e:
-            print("Fail to parse as MachineDetailForStore:", e)
+            logger.info("Fail to parse as MachineDetailForStore:")
+            logger.info(e)
             continue
 
     client.close()
@@ -221,9 +229,9 @@ def register(machines_info: List[MachineDetailForStore]):
 
 
 def main():
-    print("Starting crawler loop")
+    logger.info("Starting crawler loop")
     crawl_interval = int(os.getenv("CRAWLER_INTERVAL", "60"))
-    print("Crawl interval =", crawl_interval, "[sec]")
+    logger.info("Crawl interval =" + str(crawl_interval) + "[sec]")
 
     while True:
         start_at = time.time()
@@ -232,7 +240,7 @@ def main():
         consumed = time.time() - start_at
         if crawl_interval - consumed < 0:
             consumed += crawl_interval
-        print("waiting for next crawl:", consumed, "[sec]")
+        logger.info("Waiting for next crawl: " + str(consumed) + " [sec]")
         time.sleep(crawl_interval - consumed)
 
 
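The loop targets a fixed period: with the default CRAWLER_INTERVAL of 60, a crawl that takes 12 s leaves consumed ≈ 12, so the loop sleeps the remaining 48 s. A sketch of the scheduling idea, where crawl_once is a placeholder for the crawl-and-register work and the max() guard (not in the commit) avoids passing a negative duration to time.sleep:

    import time

    def crawl_once():
        """Placeholder for the real crawl() + register(...) work."""
        time.sleep(1)  # simulate work

    crawl_interval = 60
    while True:
        start_at = time.time()
        crawl_once()
        consumed = time.time() - start_at
        # Sleep out the remainder of the interval, never a negative amount
        time.sleep(max(crawl_interval - consumed, 0))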