
Commit

Merge pull request #29 from cdsl-research/tomoyk/update-logging
Tomoyk/update logging on crawler
tomoyk authored Jun 23, 2022
2 parents a6c7752 + 8e0aa0a commit 6cb471b
Showing 1 changed file with 20 additions and 12 deletions.
32 changes: 20 additions & 12 deletions crawler/main.py
@@ -4,13 +4,18 @@
import time
from dataclasses import asdict, dataclass
from datetime import datetime
+ from logging import INFO, basicConfig, getLogger
from typing import Dict, List

import load_config
import paramiko
import vim_cmd_parser
from pymongo import MongoClient, UpdateOne

+ FORMAT = "%(asctime)s \t %(message)s"
+ basicConfig(format=FORMAT, level=INFO)
+ logger = getLogger(__name__)


class PowerStatus:
ON: str = "on"
@@ -104,10 +109,12 @@ def get_vms_list(
_client: paramiko.SSHClient) -> Dict[int, MachineDetailWithOptions]:
"""VMのリストを取得"""

print("Start get_vms_list")
logger.info("++++++ Start get_vms_list ++++++")
# Fetch the VM info listing from the second line onward (skip the header row)
_, stdout, stderr = _client.exec_command("vim-cmd vmsvc/getallvms")
print("stderr:", stderr.read())
stderr_ = stderr.read()
if len(stderr_) > 0:
logger.info("stderr: " + stderr_.decode("utf-8"))

vm_info: Dict[int, MachineDetailWithOptions] = {}
for line in stdout.readlines():
@@ -131,8 +138,8 @@ def get_vms_list(
# print(json.dumps(result, indent=4))

except Exception as e:
print("Fail to create MachineDetailSpec: dat=", dat)
print("Exception: ", e)
logger.info("Fail to create MachineDetailSpec: dat=" + dat)
logger.info(e)
continue

# Lines that start with Vmid
@@ -143,7 +150,7 @@


def crawl() -> List[MachineDetailForStore]:
print("Start crawling")
logger.info("++++++ Start crawling ++++++")

""" Init ssh connecter """
client = paramiko.SSHClient()
@@ -154,16 +161,16 @@ def crawl() -> List[MachineDetailForStore]:
machines_info: List[MachineDetailForStore] = []
nodes_conf = load_config.get_esxi_nodes()
for esxi_nodename, config in nodes_conf.items():
print("+++ Connect to", esxi_nodename, "+++")
logger.info("+++ Connect to " + esxi_nodename + " +++")
try:
client.connect(
config.addr,
username=config.username,
key_filename=config.identity_file_path,
timeout=5.0,
)
- except paramiko.ssh_exception.SSHException as e:
-     print(e)
+ except Exception as e:
+     logger.info("Connect error" + str(e))
continue

# Merge the VM lists
@@ -180,7 +187,8 @@ def crawl() -> List[MachineDetailForStore]:
)
machines_info.append(vm_info)
except Exception as e:
print("Fail to parse as MachineDetailForStore:", e)
logger.info("Fail to parse as MachineDetailForStore:")
logger.info(e)
continue

client.close()
@@ -221,9 +229,9 @@ def register(machines_info: List[MachineDetailForStore]):


def main():
print("Starting crawler loop")
logger.info("Starting crawler loop")
crawl_interval = int(os.getenv("CRAWLER_INTERVAL", "60"))
print("Crawl interval =", crawl_interval, "[sec]")
logger.info("Crawl interval =" + str(crawl_interval) + "[sec]")

while True:
start_at = time.time()
@@ -232,7 +240,7 @@ def main():
consumed = time.time() - start_at
if crawl_interval - consumed < 0:
consumed += crawl_interval
print("waiting for next crawl:", consumed, "[sec]")
logger.info("Waiting for next crawl: " + str(consumed) + " [sec]")
time.sleep(crawl_interval - consumed)
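
The change swaps ad-hoc print() calls for the standard logging module: basicConfig() is called once at import time with a timestamped format string, and a module-level logger is obtained via getLogger(__name__). The snippet below is not part of the commit; it is a minimal standalone sketch that reproduces the logging setup the diff introduces, so the output format can be checked in isolation. The hard-coded crawl_interval is a stand-in for the CRAWLER_INTERVAL environment variable read in main().

# Standalone sketch (not part of the commit) of the logging setup
# adopted by crawler/main.py in this change.
from logging import INFO, basicConfig, getLogger

# Same format string as the diff: timestamp, a tab, then the message.
FORMAT = "%(asctime)s \t %(message)s"
basicConfig(format=FORMAT, level=INFO)
logger = getLogger(__name__)

if __name__ == "__main__":
    # Mirrors the kind of messages the crawler now emits.
    logger.info("++++++ Start crawling ++++++")
    crawl_interval = 60  # stand-in for os.getenv("CRAWLER_INTERVAL", "60")
    logger.info("Crawl interval =" + str(crawl_interval) + "[sec]")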


