Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 0 additions & 3 deletions agents/cluster/fluidity/crds_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,6 @@
"""
from __future__ import print_function
# import json
import logging
import os
# import sys

Expand Down Expand Up @@ -118,7 +117,6 @@ def register_all_fluidity_crd():
for crd_info in CRDS_INFO_LIST:
if crd_info['singular'] not in current_crds_names:
logger.info('Creating Fluidity CRD: %s', crd_info['kind'])
print('Creating Fluidity CRD: %s' % crd_info['kind'])
try:
yaml = YAML(typ='safe')
with open(crd_info['crd_file'], 'r') as data:
Expand All @@ -131,5 +129,4 @@ def register_all_fluidity_crd():
ext_api.create_custom_resource_definition(body)
except ApiException as exc:
logger.exception('%s update failed: %s', crd_info['kind'], exc)
print('%s update failed: %s' % (crd_info['kind'], exc))
raise FluidityCrdsApiException from exc
4 changes: 2 additions & 2 deletions agents/cluster/fluidity/deploy.py
Original file line number Diff line number Diff line change
Expand Up @@ -194,7 +194,7 @@ def create_svc(svc_manifest):
resp = core_api.read_namespaced_service(
name=svc_manifest['metadata']['name'],
namespace=cluster_config.NAMESPACE)
#print(resp)
#logger.debug(resp)
except ApiException as exc:
if exc.status != 404:
logger.error('Unknown error reading service: %s', exc)
Expand All @@ -212,7 +212,7 @@ def create_svc(svc_manifest):
try:
svc_obj = core_api.create_namespaced_service(body=svc_manifest,
namespace=cluster_config.NAMESPACE)
#print(svc_obj)
#logger.debug(svc_obj)
return svc_obj
except ApiException as exc:
logger.error('Failed to create service: %s', exc)
Expand Down
9 changes: 5 additions & 4 deletions agents/cluster/fluidity/internal_payload.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@
from typing import Optional, Dict, List, Any
from enum import Enum
from mlsysops.events import MessageEvents
from agents.mlsysops.logger_util import logger

# --------------- SHARED COMPONENTS --------------- #

Expand Down Expand Up @@ -317,8 +318,8 @@ def get_payload_model(self):
try:
event = FluidityEvent(**example)
parsed_payload = event.get_payload_model()
print(f"Event: {example['event']}")
print(parsed_payload.model_dump_json(indent=4))
logger.info(f"Event: {example['event']}")
logger.info(parsed_payload.model_dump_json(indent=4))
except Exception as e:
print(f"Error processing event: {example['event']}")
print(f"Exception: {str(e)}")
logger.error(f"Error processing event: {example['event']}")
logger.error(f"Exception: {str(e)}")
Original file line number Diff line number Diff line change
Expand Up @@ -61,7 +61,7 @@ def parse_analyze_interval(interval: str) -> int:


def initialize():
print(f"Initializing policy {inspect.stack()[1].filename}")
logger.info(f"Initializing policy {inspect.stack()[1].filename}")

initialContext = {
"telemetry": {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,7 @@ def parse_analyze_interval(interval: str) -> int:


def initialize():
print(f"Initializing policy {inspect.stack()[1].filename}")
logger.info(f"Initializing policy {inspect.stack()[1].filename}")

initialContext = {
"telemetry": {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
from mlsysops.logger_util import logger

def initialize():
print(f"Initializing policy {inspect.stack()[1].filename}")
logger.info(f"Initializing policy {inspect.stack()[1].filename}")

initialContext = {
"telemetry": {
Expand Down
5 changes: 3 additions & 2 deletions agents/cluster/fluidity/plan_payload.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@
from pydantic import BaseModel, Field, ValidationError, model_validator, RootModel
from typing import Any, Dict, List, Optional, Union
import re
from agents.mlsysops.logger_util import logger


class PlatformRequirements(BaseModel):
Expand Down Expand Up @@ -144,6 +145,6 @@ class FluidityPlanPayload(BaseModel):
# Validate the payload
try:
validated_payload = FluidityPlanPayload(**payload)
print("Validation successful!")
logger.info("Validation successful!")
except ValidationError as e:
print(f"Validation failed: {e}")
logger.error(f"Validation failed: {e}")
8 changes: 4 additions & 4 deletions agents/cluster/mechanisms/fluidity.py
Original file line number Diff line number Diff line change
Expand Up @@ -363,8 +363,8 @@ async def apply(plan):
logger.test(f"|1| Fluidity mechanism forwarded planuid:{plan['plan_uid']} to Fluidity status:True")

except Exception as e:
logger.debug("Error in sending message to fluidity")
print(traceback.format_exc())
logger.error("Error in sending message to fluidity")
logger.exception(traceback.format_exc())

return False

Expand All @@ -376,8 +376,8 @@ async def send_message(msg):
try:
await fluidity_mechanism_instance.internal_queue_outbound.put(msg)
except Exception as e:
logger.debug("Error in sending message to fluidity")
print(traceback.format_exc())
logger.error("Error in sending message to fluidity")
logger.exception(traceback.format_exc())


def get_state():
Expand Down
2 changes: 1 addition & 1 deletion agents/cluster/policies/policy-changeCompSpec.py
Original file line number Diff line number Diff line change
Expand Up @@ -62,7 +62,7 @@ def parse_analyze_interval(interval: str) -> int:


def initialize():
print(f"Initializing policy {inspect.stack()[1].filename}")
logger.info(f"Initializing policy {inspect.stack()[1].filename}")

initialContext = {
"telemetry": {
Expand Down
2 changes: 1 addition & 1 deletion agents/cluster/policies/policy-relocateComponents.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@
from mlsysops.utilities import evaluate_condition

def initialize():
print(f"Initializing policy {inspect.stack()[1].filename}")
logger.info(f"Initializing policy {inspect.stack()[1].filename}")

initialContext = {
"telemetry": {
Expand Down
10 changes: 5 additions & 5 deletions agents/continuum/MLSContinuumAgent.py
Original file line number Diff line number Diff line change
Expand Up @@ -85,7 +85,7 @@ async def run(self):
except Exception as e:
logger.error(f"Error in running tasks: {e}")

print("MLSAgent stopped.")
logger.info("MLSAgent stopped.")

async def message_queue_listener(self):
"""
Expand Down Expand Up @@ -118,11 +118,11 @@ async def message_queue_listener(self):
"payload": {"name": data['name']},
})
else:
print(f"Unhandled event type: {event}")
logger.warning(f"Unhandled event type: {event}")

except Exception as e:
print(f"Error processing message: {e}")
logger.error(traceback.format_exc())
logger.error(f"Error processing message: {e}")
logger.exception(traceback.format_exc())

async def apply_propagation_policies(self):
"""
Expand Down Expand Up @@ -412,5 +412,5 @@ async def get_karmada_clusters(self):
return return_object

except Exception as e:
print(f"Error retrieving clusters: {e}")
logger.error(f"Error retrieving clusters: {e}")
return []
2 changes: 1 addition & 1 deletion agents/continuum/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,7 @@ async def main():
await asyncio.gather(agent_task)

except Exception as e:
print(f"An error occurred: {e}")
logger.error(f"An error occurred: {e}")


if __name__ == "__main__":
Expand Down
16 changes: 8 additions & 8 deletions agents/continuum/templates/YAMLToJSONConverter.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
import json
from jsonschema import validate, ValidationError
import argparse

from agents.mlsysops.logger_util import logger

def convert_yaml_crd_to_json(yaml_file: str, json_file: str):
"""
Expand Down Expand Up @@ -53,10 +53,10 @@ def convert_yaml_crd_to_json(yaml_file: str, json_file: str):
with open(json_file, 'w') as f:
json.dump(full_schema, f, indent=4)

print(f"JSON schema successfully written to {json_file}")
logger.info(f"JSON schema successfully written to {json_file}")

except Exception as e:
print(f"Error occurred: {e}")
logger.error(f"Error occurred: {e}")

def validate_yaml_against_schema(yaml_file: str, json_schema_file: str):
"""
Expand All @@ -80,11 +80,11 @@ def validate_yaml_against_schema(yaml_file: str, json_schema_file: str):

# Validate the YAML data against the JSON schema
validate(instance=yaml_data, schema=schema)
print(f"The YAML file '{yaml_file}' is valid according to the schema '{json_schema_file}'.")
logger.info(f"The YAML file '{yaml_file}' is valid according to the schema '{json_schema_file}'.")
except ValidationError as ve:
print(f"Validation Error: {ve.message}")
logger.error(f"Validation Error: {ve.message}")
except Exception as e:
print(f"Error: {e}")
logger.error(f"Error: {e}")

#example usage
if __name__ == "__main__":
Expand All @@ -101,12 +101,12 @@ def validate_yaml_against_schema(yaml_file: str, json_schema_file: str):
if args.command == "convert":
# Convert the YAML CRD schema to a JSON schema
if not args.schema:
print("Error: You must specify an output file for the JSON schema using --schema.")
logger.error("Error: You must specify an output file for the JSON schema using --schema.")
else:
convert_yaml_crd_to_json(args.input, args.schema)
elif args.command == "validate":
# Validate a YAML file against a JSON schema
if not args.schema:
print("Error: You must specify the JSON schema file for validation using --schema.")
logger.warning("Error: You must specify the JSON schema file for validation using --schema.")
else:
validate_yaml_against_schema(args.input, args.schema)
Loading