fetch_ml/podman/workspace/tensorflow_project/train.py
Jeremie Fraeys 4aecd469a1 feat: implement comprehensive monitoring and container orchestration
- Add Prometheus, Grafana, and Loki monitoring stack
- Include pre-configured dashboards for ML metrics and logs
- Add Podman container support with security policies
- Implement ML runtime environments for multiple frameworks
- Add containerized ML project templates (PyTorch, TensorFlow, etc.)
- Include secure runner with isolation and resource limits
- Add comprehensive log aggregation and alerting
2025-12-04 16:54:49 -05:00

80 lines
2.2 KiB
Python
Executable file

#!/usr/bin/env python3
import argparse
import json
import logging
from pathlib import Path
import time
import numpy as np
import tensorflow as tf
def main():
    """Train a small TensorFlow classifier on synthetic data and save artifacts.

    Command-line interface (unchanged):
        --epochs         number of training epochs (default 10)
        --batch_size     mini-batch size (default 32)
        --learning_rate  Adam learning rate (default 0.001)
        --output_dir     required; results.json and the model are written here
    """
    args = _parse_args()

    logging.basicConfig(level=logging.INFO)
    logger = logging.getLogger(__name__)
    # Lazy %-style args so formatting is skipped when INFO is disabled.
    logger.info("Training TensorFlow model for %d epochs...", args.epochs)

    X, y = _make_synthetic_data()

    # Shuffle buffer covers the full dataset (1000 samples) for a true shuffle.
    dataset = (
        tf.data.Dataset.from_tensor_slices((X, y))
        .shuffle(buffer_size=1000)
        .batch(args.batch_size)
    )

    model = _build_model(args.learning_rate)
    history = model.fit(dataset, epochs=args.epochs, verbose=1)
    final_accuracy = history.history["accuracy"][-1]
    logger.info("Training completed. Final accuracy: %.4f", final_accuracy)

    _save_artifacts(args, model, final_accuracy, X)
    logger.info("Results and model saved successfully!")


def _parse_args():
    """Parse and return the training CLI arguments."""
    parser = argparse.ArgumentParser()
    parser.add_argument("--epochs", type=int, default=10)
    parser.add_argument("--batch_size", type=int, default=32)
    parser.add_argument("--learning_rate", type=float, default=0.001)
    parser.add_argument("--output_dir", type=str, required=True)
    return parser.parse_args()


def _make_synthetic_data():
    """Return a reproducible synthetic binary-classification set (X, y).

    X: (1000, 20) standard-normal features; y: (1000,) labels in {0, 1}.
    Seeds both NumPy and TensorFlow so runs are deterministic.
    """
    np.random.seed(42)
    tf.random.set_seed(42)
    X = np.random.randn(1000, 20)
    y = np.random.randint(0, 2, (1000,))
    return X, y


def _build_model(learning_rate):
    """Build and compile a small dense softmax classifier for 20 features.

    Uses sparse categorical cross-entropy because labels are integer-coded.
    """
    model = tf.keras.Sequential(
        [
            tf.keras.layers.Dense(64, activation="relu", input_shape=(20,)),
            tf.keras.layers.Dense(32, activation="relu"),
            tf.keras.layers.Dense(2, activation="softmax"),
        ]
    )
    model.compile(
        optimizer=tf.keras.optimizers.Adam(learning_rate=learning_rate),
        loss="sparse_categorical_crossentropy",
        metrics=["accuracy"],
    )
    return model


def _save_artifacts(args, model, final_accuracy, X):
    """Write results.json and the trained model under args.output_dir."""
    results = {
        "model_type": "TensorFlow",
        "epochs": args.epochs,
        "batch_size": args.batch_size,
        "learning_rate": args.learning_rate,
        "final_accuracy": float(final_accuracy),
        "n_samples": len(X),
        "input_features": X.shape[1],
    }
    output_dir = Path(args.output_dir)
    output_dir.mkdir(parents=True, exist_ok=True)
    with open(output_dir / "results.json", "w") as f:
        json.dump(results, f, indent=2)
    # NOTE(review): saving to a suffix-less path uses the SavedModel directory
    # format on TF 2.x / Keras 2, but Keras 3 rejects it (requires ".keras" or
    # ".h5"). Confirm the pinned TF version before upgrading.
    model.save(output_dir / "tensorflow_model")
# Script entry point: run training only when executed directly, not on import.
if __name__ == "__main__":
    main()