diff --git a/.DS_Store b/.DS_Store
new file mode 100644
index 0000000000000000000000000000000000000000..4bdcba3b4aa11ab22e38bca8bbd38289a9dfd9c6
Binary files /dev/null and b/.DS_Store differ
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
new file mode 100644
index 0000000000000000000000000000000000000000..ba1a19ad6e27d957dc29955066ae4e9152efcef6
--- /dev/null
+++ b/.gitlab-ci.yml
@@ -0,0 +1,51 @@
+stages:
+  - build
+  - test
+  - deploy
+
+variables:
+  DOCKER_USERNAME: "medkaddour"
+  TAG: "latest"
+  DEPLOY_ENV: "local"  # Set to 'cloud' to deploy to the cloud environment instead of locally
+
+before_script:
+  - echo "Setting up environment..."
+
+# Build stage: Build the Docker images
+build:
+  stage: build
+  script:
+    - echo "Building Docker images..."
+    - docker build -t ${DOCKER_USERNAME}/camera:${TAG} ./services/camera
+    - docker build -t ${DOCKER_USERNAME}/motion_detector:${TAG} ./services/motion_detector
+    - docker build -t ${DOCKER_USERNAME}/object_recognizer:${TAG} ./services/object_recognizer
+  only:
+    - master
+
+# Test stage: Run unit tests (optional, can be customized based on your services)
+test:
+  stage: test
+  script:
+    - echo "Running tests..."
+    - docker run ${DOCKER_USERNAME}/camera:${TAG} pytest tests/
+    - docker run ${DOCKER_USERNAME}/motion_detector:${TAG} pytest tests/
+    - docker run ${DOCKER_USERNAME}/object_recognizer:${TAG} pytest tests/
+  only:
+    - master
+
+# Deploy stage: Deploy using Docker Compose
+deploy:
+  stage: deploy
+  script:
+    - echo "Deploying the system..."
+    - if [ "$DEPLOY_ENV" == "local" ]; then
+        docker-compose -f ./deploy/docker-compose/docker-compose.yml down --volumes --remove-orphans;
+        docker-compose -f ./deploy/docker-compose/docker-compose.yml up -d --build --force-recreate;
+      fi
+    - if [ "$DEPLOY_ENV" == "cloud" ]; then
+        docker-compose -f ./deploy/docker-compose/docker-compose.yml down --volumes --remove-orphans;
+        docker-compose -f ./deploy/docker-compose/docker-compose.yml pull --ignore-pull-failures;
+        docker-compose -f ./deploy/docker-compose/docker-compose.yml up -d --build --force-recreate;
+      fi
+  only:
+    - master
diff --git a/push.sh b/push.sh
deleted file mode 100644
index 519dfa2f9d40dbd54ff91da18ca77c7ed276364c..0000000000000000000000000000000000000000
--- a/push.sh
+++ /dev/null
@@ -1,20 +0,0 @@
-#!/bin/bash
-
-# Define variables
-DOCKER_USERNAME="medkaddour"
-SERVICES=("camera" "motion_detector" "object_recognizer")
-TAG="latest"
-
-# Function to push a Docker image
-push_image() {
-  local service=$1
-  echo "Pushing Docker image for ${service} to Docker Hub..."
-  docker push ${DOCKER_USERNAME}/${service}:${TAG}
-}
-
-# Step 1: Push Docker images for all services
-for service in "${SERVICES[@]}"; do
-  push_image ${service}
-done
-
-echo "Push completed."
diff --git a/readme.md b/readme.md
index dcc0ee333524423b47ed69286fb43e5425dc14c8..7339e0193036bdcd30b1e0d22fd3ede243d496d2 100644
--- a/readme.md
+++ b/readme.md
@@ -73,7 +73,9 @@ This project demonstrates a video processing surveillance system designed to enh
   - **Concurrency:** Supports multiple clients by handling frame processing in separate threads to ensure scalability and efficiency.  
 
 - **Connection:** Listens for connections from the Motion Detector and processes incoming frames asynchronously.
-### Other Components (monitoring and distributed tracing)
+
+### Other Components (Monitoring and Distributed Tracing)
+
 To monitor your application effectively, you can integrate the following components alongside OpenTelemetry to gather comprehensive metrics and performance data:
 
 1. **OpenTelemetry Collector**  
@@ -107,9 +109,6 @@ To monitor your application effectively, you can integrate the following compone
      - **Service Metrics:** Metrics from distributed services (Camera, Motion Detection, Object Recognition).
      - **System Metrics:** Metrics from system-level exporters (e.g., CPU, memory usage, disk I/O from Node Exporter, cAdvisor).
 
-
-
-These components together allow you to monitor the application comprehensively, including its performance, system health, container resource usage, and overall operational efficiency.
 ## Scenarios Affecting Performance
 
 ### Normal Conditions
@@ -123,39 +122,4 @@ These components together allow you to monitor the application comprehensively,
 - **Description:** Some nodes may experience longer processing times due to a lack of computational resources.
 
 ### Large Distance
-- **Description:** Motion Detection deployed far from cameras.
-- **Impact:** Delay in frame transmission.
-
-## Metrics Collected
-Here are the metrics sent to OpenTelemetry:
-
-1. **FPS (Frames per Second)**
-2. **Camera-to-Edge Transmission Time (`c2e_transmission_time`)**
-3. **Frame Processing Time (`md_processing_time`)**
-4. **Edge-to-Cloud Transmission Time (`md_e2c_transmission_time`)**
-5. **Response Time (`response_time`)**
-6. **Frame Queue Length (`or_len_q`)**
-7. **Processing Time (`or_processing_time`)**
-
-## Actions
-
-1. **Scaling CPU on Cloud:** Adjusting computational resources on the cloud to handle increased load.
-2. **Moving an instance of Motion Detector:** Repositioning the Motion Detection service to optimize performance.
-
-## Service Level Objectives (SLOs)
-
-1. **Response Time:** The time taken to detect and respond to an event.
-2. **Frame Dropping Rate:** The frequency of dropped frames during transmission or processing.
-
-
-## Diagram
-
-![use case: Video Processing Edge to Cloud Application](usecase.png)
-*Figure 1: Real-time detection system for identifying and alerting dangerous animals.*
-
-## References
-
-- OpenCV library for motion detection.
-- YOLOv3 model for object recognition.
-
----
\ No newline at end of file
+- **Description:** Motion Detection deployed far from the Object Recognition service may result in increased transmission latency.
\ No newline at end of file
diff --git a/services/camera/.DS_Store b/services/camera/.DS_Store
new file mode 100644
index 0000000000000000000000000000000000000000..1b83a3bed89e754471532e37c85dffb2fb73f08e
Binary files /dev/null and b/services/camera/.DS_Store differ
diff --git a/services/camera/src/camera.txt b/services/camera/src/camera.txt
new file mode 100644
index 0000000000000000000000000000000000000000..2158f1435caa529469bc72b1e792614acd7063e3
--- /dev/null
+++ b/services/camera/src/camera.txt
@@ -0,0 +1,161 @@
+import argparse
+import random
+import socket
+import cv2
+import pickle
+import struct
+import time
+import datetime
+import os  # Used to read service configuration from environment variables
+from tracerprovider import tracer, meter
+import pyshine as ps  # pip install pyshine
+import imutils  # pip install imutils
+import logging
+
+# Configure logging
+logging.basicConfig(
+    format='%(asctime)s - %(levelname)s - %(message)s',
+    level=logging.INFO,  # You can change this to DEBUG, ERROR, etc.
+    handlers=[
+        logging.StreamHandler(),  # Log to console
+        logging.FileHandler("output/camera.log")  # Log to a file
+    ]
+)
+
+
+def generate_random_intervals(events_per_hour):
+    # Total time in one hour (in seconds)
+    total_time = 3600
+
+    # Calculate average time between events
+    avg_interval = total_time / events_per_hour
+
+    # Generate random factors for intervals
+    random_factors = [random.uniform(0.5, 1.5) for _ in range(events_per_hour)]
+
+    # Normalize the random factors so that their sum equals total_time
+    total_factor = sum(random_factors)
+    normalized_intervals = [avg_interval * (factor / total_factor * events_per_hour) for factor in random_factors]
+
+    # Return intervals rounded to a few decimal points (optional)
+    result = [round(interval, 2) for interval in normalized_intervals]
+    logging.info(f"Generated intervals: {result}")
+    return result
+
+
+def main():
+    # Get environment variables (with defaults if not set)
+    camera = os.getenv("CAMERA", "false").lower() == "true"
+    animal_name = os.getenv("ANIMAL_NAME", "tiger")
+    appearance_rate = int(os.getenv("APPEARANCE_RATE", 600))
+    host_ip = os.getenv("MDHOST", "localhost")
+    port = int(os.getenv("MDPORT", 9998))
+
+    # Map animal to the appropriate video filenames
+    animal_map = {
+        'bear': ('footage/bear/no_bear.mp4', 'footage/bear/with_bear.mp4'),
+        'tiger': ('footage/tiger/no_tiger.mp4', 'footage/tiger/with_tiger.mp4'),
+        'wolf': ('footage/wolf/no_wolf.mp4', 'footage/wolf/with_wolf.mp4')
+    }
+
+    if animal_name not in animal_map:
+        logging.error(f"No video available for {animal_name}")
+        return
+
+    no_animal_video, with_animal_video = animal_map[animal_name]
+
+    while True:
+        client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+
+        while True:
+            try:
+                logging.info(f"Attempting to connect to {host_ip}:{port}")
+                client_socket.connect((host_ip, port))
+                logging.info(f"Connected to {host_ip}:{port}")
+                break
+            except Exception as e:
+                logging.warning(f"Cannot connect to motion detector: {e}")
+                time.sleep(1)
+                continue
+
+        # Initialize FPS calculation
+        fps_start_time = time.time()
+        fps_frame_count = 0
+        fps = 0
+        fps_histo = meter.create_histogram(
+            name="camera_fps_histo",
+            description="Frames per second",
+            unit="fps"
+        )
+        fps_count = meter.create_gauge(
+            name="camera_fps_gauge",
+            description="Frames per second",
+            unit="fps"
+        )
+
+        frame_rate = 30.0
+        frame_interval = 1.0 / frame_rate
+        motion = False
+        if client_socket:
+            while True:
+                for interval in generate_random_intervals(appearance_rate):
+                    interval_frame_count = interval * 30
+                    frame_number = 0
+                    if motion:
+                        vid = cv2.VideoCapture(with_animal_video)
+                        motion = False
+                    else:
+                        vid = cv2.VideoCapture(no_animal_video)
+                        motion = True
+                    logging.info(f"Motion: {motion}")
+                    while (frame_number < interval_frame_count) or (not motion):
+                        frame_number += 1
+                        with tracer.start_as_current_span("sending frame") as span:
+                            try:
+                                img, frame = vid.read()
+                                if not img:
+                                    if motion:
+                                        vid = cv2.VideoCapture(no_animal_video)
+                                        continue
+                                    else:
+                                        logging.info(f"Motion frames count: {frame_number}")
+                                        break
+                                frame = imutils.resize(frame, width=640)
+
+                                start_time = time.time()
+
+                                current_time = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
+
+                                data = {
+                                    'frame': frame,
+                                    'capture_time': current_time
+                                }
+                                a = pickle.dumps(data)
+                                message = struct.pack("Q", len(a)) + a
+                                client_socket.sendall(message)
+
+                                # Update FPS calculation
+                                fps_frame_count += 1
+                                elapsed_time = time.time() - fps_start_time
+                                if elapsed_time >= 10.0:
+                                    fps = round(fps_frame_count / elapsed_time, 2)
+                                    logging.info(f"FPS: {fps} (Total frames: {fps_frame_count}, Time: {elapsed_time})")
+                                    fps_histo.record(fps)
+                                    fps_count.set(fps)
+                                    fps_frame_count = 0
+                                    fps_start_time = time.time()
+
+                                # Maintain the frame rate
+                                end_time = time.time()
+                                elapsed_time = end_time - start_time
+                                if elapsed_time < frame_interval:
+                                    time.sleep(0.025)  # NOTE(review): sleeps a fixed 0.025 s; likely intended frame_interval - elapsed_time — confirm
+
+                            except Exception as e:
+                                logging.error(f"Error sending frame: {e}")
+                                client_socket.close()
+                                break
+
+
+if __name__ == "__main__":
+    main()
diff --git a/services/object_recognizer/.DS_Store b/services/object_recognizer/.DS_Store
new file mode 100644
index 0000000000000000000000000000000000000000..5e0d297e84354d548129d8fc599a6905b5dd2134
Binary files /dev/null and b/services/object_recognizer/.DS_Store differ