From fb3e1b6c02fe06ca8e50601b9e1794398b007699 Mon Sep 17 00:00:00 2001
From: sidimohammedkaddour <medkaddourr@gmail.com>
Date: Tue, 17 Dec 2024 15:10:01 +0100
Subject: [PATCH] Initial commit

---
 .DS_Store                            | Bin 0 -> 8196 bytes
 .gitlab-ci.yml                       |  51 +++++++++
 push.sh                              |  20 ----
 readme.md                            |  44 +-------
 services/camera/.DS_Store            | Bin 0 -> 6148 bytes
 services/camera/src/camera.txt       | 161 +++++++++++++++++++++++++++
 services/object_recognizer/.DS_Store | Bin 0 -> 6148 bytes
 7 files changed, 216 insertions(+), 60 deletions(-)
 create mode 100644 .DS_Store
 create mode 100644 .gitlab-ci.yml
 delete mode 100644 push.sh
 create mode 100644 services/camera/.DS_Store
 create mode 100644 services/camera/src/camera.txt
 create mode 100644 services/object_recognizer/.DS_Store

diff --git a/.DS_Store b/.DS_Store
new file mode 100644
index 0000000000000000000000000000000000000000..4bdcba3b4aa11ab22e38bca8bbd38289a9dfd9c6
GIT binary patch
literal 8196
zcmeHM&2Cab6h1?-y`)iFQw?!-6Ssi0&?d$WQdl%GF1$t;YLE*^Z+p2z_>m~Vu-13*
z72Ns)?p^sNCc4t^%uFjkHbG5_i8Ey8o4<3;@Ns75o(mC)V!ysklq4btnQ>zs#V-<%
zbDm2v({l+ZAWzhx>Om-LeklF9*|x(nU>UFsSOzQumVtkP0o>V~oHOoyJ!(tKfMwvn
zWPr~H7n!kOaI8@s9Vk=?0F0wsCDaiIC>hsa!Qfb<5`|73JqRNzOo$=0IL2L`4lEcP
zYt-T-v^WW4S(pe#DC>}O)pQaCjkdH5SO(@9;MjeYYE-2bHD=;>-b#0nGv8HG!c%ko
zLAPyU{SSUS47$B+_Opns#4oI_Icv_k^VB<(1Fu)@^}EGt=P|b)NE!Mk^{W3cXbdVF
zTl+HVRfDL}Q364|fhkWO1yNlNin1ToyGqv615Uz8R5p^sVLqQrxjVaiqm(<mwUbS`
z`P|-UlyI)zxOw}2<v46b@{L+G33F`FMcqCv-=a44onCmID3nnX{Va2Z>LkI>v0^8U
zngmmAlwI^>=p|a=#5)gg7Gv2-&erHI9ne0N=^lAho}NwbXJ&6fw@*jVUZWnRVZQ;?
ztpH;YtR7(n&1pS?zsaQ4%_%(<{I;w^QWF{idWBYJM#Hz?*RXUC$mLURF1!1e9<Tsc
zVRPH7$yOU2(dCSI4Ol>`B}*l<L!9IE*WR@+A9+VDBw7DM$@byoN<Z;}7qB<u_t)Vm
z>~$eLki90vNBf|JC+WV3p2L%xY8UYNF|!ZU+41h@*)iso2(_yi?fuVu*kl=476U8V
zaFXNy^4ahIm$f`=*D_!kI0ptqyyTUNFm&>eJZqD$Y8Uw#GAEul)~G~KsM{Q<!g1i~
cABL#An94E*gJX^8L9!15iUwO)2F{g%A6SYgMgRZ+

literal 0
HcmV?d00001

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
new file mode 100644
index 0000000..ba1a19a
--- /dev/null
+++ b/.gitlab-ci.yml
@@ -0,0 +1,51 @@
+stages:
+  - build
+  - test
+  - deploy
+
+variables:
+  DOCKER_USERNAME: "medkaddour"
+  TAG: "latest"
+  DEPLOY_ENV: "local"  # Change to 'cloud' if deploying to the cloud
+
+before_script:
+  - echo "Setting up environment..."
+
+# Build stage: Build the Docker images
+build:
+  stage: build
+  script:
+    - echo "Building Docker images..."
+    - docker build -t ${DOCKER_USERNAME}/camera:${TAG} ./services/camera
+    - docker build -t ${DOCKER_USERNAME}/motion_detector:${TAG} ./services/motion_detector
+    - docker build -t ${DOCKER_USERNAME}/object_recognizer:${TAG} ./services/object_recognizer
+  only:
+    - master
+
+# Test stage: run unit tests inside the built images (optional; customize per service)
+test:
+  stage: test
+  script:
+    - echo "Running tests..."
+    - docker run ${DOCKER_USERNAME}/camera:${TAG} pytest tests/
+    - docker run ${DOCKER_USERNAME}/motion_detector:${TAG} pytest tests/
+    - docker run ${DOCKER_USERNAME}/object_recognizer:${TAG} pytest tests/
+  only:
+    - master
+
+# Deploy stage: Deploy using Docker Compose
+deploy:
+  stage: deploy
+  script:
+    - echo "Deploying the system..."
+    - if [ "$DEPLOY_ENV" == "local" ]; then
+        docker-compose -f ./deploy/docker-compose/docker-compose.yml down --volumes --remove-orphans;
+        docker-compose -f ./deploy/docker-compose/docker-compose.yml up -d --build --force-recreate;
+      fi
+    - if [ "$DEPLOY_ENV" == "cloud" ]; then
+        docker-compose -f ./deploy/docker-compose/docker-compose.yml down --volumes --remove-orphans;
+        docker-compose -f ./deploy/docker-compose/docker-compose.yml pull --ignore-pull-failures;
+        docker-compose -f ./deploy/docker-compose/docker-compose.yml up -d --build --force-recreate;
+      fi
+  only:
+    - master
diff --git a/push.sh b/push.sh
deleted file mode 100644
index 519dfa2..0000000
--- a/push.sh
+++ /dev/null
@@ -1,20 +0,0 @@
-#!/bin/bash
-
-# Define variables
-DOCKER_USERNAME="medkaddour"
-SERVICES=("camera" "motion_detector" "object_recognizer")
-TAG="latest"
-
-# Function to push a Docker image
-push_image() {
-  local service=$1
-  echo "Pushing Docker image for ${service} to Docker Hub..."
-  docker push ${DOCKER_USERNAME}/${service}:${TAG}
-}
-
-# Step 1: Push Docker images for all services
-for service in "${SERVICES[@]}"; do
-  push_image ${service}
-done
-
-echo "Push completed."
diff --git a/readme.md b/readme.md
index dcc0ee3..7339e01 100644
--- a/readme.md
+++ b/readme.md
@@ -73,7 +73,9 @@ This project demonstrates a video processing surveillance system designed to enh
   - **Concurrency:** Supports multiple clients by handling frame processing in separate threads to ensure scalability and efficiency.  
 
 - **Connection:** Listens for connections from the Motion Detector and processes incoming frames asynchronously.
-### Other Components (monitoring and distributed tracing)
+
+### Other Components (Monitoring and Distributed Tracing)
+
 To monitor your application effectively, you can integrate the following components alongside OpenTelemetry to gather comprehensive metrics and performance data:
 
 1. **OpenTelemetry Collector**  
@@ -107,9 +109,6 @@ To monitor your application effectively, you can integrate the following compone
      - **Service Metrics:** Metrics from distributed services (Camera, Motion Detection, Object Recognition).
      - **System Metrics:** Metrics from system-level exporters (e.g., CPU, memory usage, disk I/O from Node Exporter, cAdvisor).
 
-
-
-These components together allow you to monitor the application comprehensively, including its performance, system health, container resource usage, and overall operational efficiency.
 ## Scenarios Affecting Performance
 
 ### Normal Conditions
@@ -123,39 +122,4 @@ These components together allow you to monitor the application comprehensively,
 - **Description:** Some nodes may experience longer processing times due to a lack of computational resources.
 
 ### Large Distance
-- **Description:** Motion Detection deployed far from cameras.
-- **Impact:** Delay in frame transmission.
-
-## Metrics Collected
-Here are the metrics sent to OpenTelemetry:
-
-1. **FPS (Frames per Second)**
-2. **Camera-to-Edge Transmission Time (`c2e_transmission_time`)**
-3. **Frame Processing Time (`md_processing_time`)**
-4. **Edge-to-Cloud Transmission Time (`md_e2c_transmission_time`)**
-5. **Response Time (`response_time`)**
-6. **Frame Queue Length (`or_len_q`)**
-7. **Processing Time (`or_processing_time`)**
-
-## Actions
-
-1. **Scaling CPU on Cloud:** Adjusting computational resources on the cloud to handle increased load.
-2. **Moving an instance of Motion Detector:** Repositioning the Motion Detection service to optimize performance.
-
-## Service Level Objectives (SLOs)
-
-1. **Response Time:** The time taken to detect and respond to an event.
-2. **Frame Dropping Rate:** The frequency of dropped frames during transmission or processing.
-
-
-## Diagram
-
-![use case: Video Processing Edge to Cloud Application](usecase.png)
-*Figure 1: Real-time detection system for identifying and alerting dangerous animals.*
-
-## References
-
-- OpenCV library for motion detection.
-- YOLOv3 model for object recognition.
-
----
\ No newline at end of file
+- **Description:** Deploying Motion Detection far from the Object Recognition service may result in increased transmission latency.
\ No newline at end of file
diff --git a/services/camera/.DS_Store b/services/camera/.DS_Store
new file mode 100644
index 0000000000000000000000000000000000000000..1b83a3bed89e754471532e37c85dffb2fb73f08e
GIT binary patch
literal 6148
zcmeHKF>V4u473vzA<<B#+%Mz@D+Dj#0~AO!5kyg-zKVC{Y0TJe;6g_lG?u)x>-Fqv
zr#PR@%ooS)huPfBrf{O2IgE|_^pU+(#DQ>}akQV)`g{L**zZQ!e+S5Yvjds-b;IYL
zObSQ=DIf);fE0MF0##q9v&SB*)1-hDcmf6ZeQ0oEFB}r%(}5vc0N@1SFwCQu05%4I
zy>LiG1m;NvCe^FO@T4Q&Dz6s~iAguF=ELh|uMWlIcAVcL-MlAilmb%VT7lPG&RPFg
z@H_qgHAyQeAO)UE0iP__%LSfPwRQA3*4hT&z?t(4r(qrx3{j4OQI4@-Iew3%%xj!u
WzZVXPK}S63K>Z9*7nv0JZv`%`G!@|h

literal 0
HcmV?d00001

diff --git a/services/camera/src/camera.txt b/services/camera/src/camera.txt
new file mode 100644
index 0000000..2158f14
--- /dev/null
+++ b/services/camera/src/camera.txt
@@ -0,0 +1,161 @@
+import argparse
+import random
+import socket
+import cv2
+import pickle
+import struct
+import time
+import datetime
+import os  # Read configuration from environment variables
+from tracerprovider import tracer, meter
+import pyshine as ps  # pip install pyshine
+import imutils  # pip install imutils
+import logging
+
+# Configure logging
+logging.basicConfig(
+    format='%(asctime)s - %(levelname)s - %(message)s',
+    level=logging.INFO,  # You can change this to DEBUG, ERROR, etc.
+    handlers=[
+        logging.StreamHandler(),  # Log to console
+        logging.FileHandler("output/camera.log")  # Log to a file
+    ]
+)
+
+
+def generate_random_intervals(events_per_hour):
+    # Total time in one hour (in seconds)
+    total_time = 3600
+
+    # Calculate average time between events
+    avg_interval = total_time / events_per_hour
+
+    # Generate random factors for intervals
+    random_factors = [random.uniform(0.5, 1.5) for _ in range(events_per_hour)]
+
+    # Scale the random factors so that the resulting intervals sum to total_time
+    total_factor = sum(random_factors)
+    normalized_intervals = [avg_interval * (factor / total_factor * events_per_hour) for factor in random_factors]
+
+    # Return intervals rounded to a few decimal points (optional)
+    result = [round(interval, 2) for interval in normalized_intervals]
+    logging.info(f"Generated intervals: {result}")
+    return result
+
+
+def main():
+    # Get environment variables (with defaults if not set)
+    camera = os.getenv("CAMERA", "false").lower() == "true"
+    animal_name = os.getenv("ANIMAL_NAME", "tiger")
+    appearance_rate = int(os.getenv("APPEARANCE_RATE", 600))
+    host_ip = os.getenv("MDHOST", "localhost")
+    port = int(os.getenv("MDPORT", 9998))
+
+    # Map animal to the appropriate video filenames
+    animal_map = {
+        'bear': ('footage/bear/no_bear.mp4', 'footage/bear/with_bear.mp4'),
+        'tiger': ('footage/tiger/no_tiger.mp4', 'footage/tiger/with_tiger.mp4'),
+        'wolf': ('footage/wolf/no_wolf.mp4', 'footage/wolf/with_wolf.mp4')
+    }
+
+    if animal_name not in animal_map:
+        logging.error(f"No video available for {animal_name}")
+        return
+
+    no_animal_video, with_animal_video = animal_map[animal_name]
+
+    while True:
+        client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+
+        while True:
+            try:
+                logging.info(f"Attempting to connect to {host_ip}:{port}")
+                client_socket.connect((host_ip, port))
+                logging.info(f"Connected to {host_ip}:{port}")
+                break
+            except Exception as e:
+                logging.warning(f"Cannot connect to motion detector: {e}")
+                time.sleep(1)
+                continue
+
+        # Initialize FPS calculation
+        fps_start_time = time.time()
+        fps_frame_count = 0
+        fps = 0
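+        # OpenTelemetry instruments: a histogram for the FPS distribution and a gauge for the most recent value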
+        fps_histo = meter.create_histogram(
+            name="camera_fps_histo",
+            description="Frames per second",
+            unit="fps"
+        )
+        fps_count = meter.create_gauge(
+            name="camera_fps_gauge",
+            description="Frames per second",
+            unit="fps"
+        )
+
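+        # Pace the outgoing stream at roughly frame_rate frames per second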
+        frame_rate = 30.0
+        frame_interval = 1.0 / frame_rate
+        motion = False
+        if client_socket:
+            while True:
+                for interval in generate_random_intervals(appearance_rate):
+                    interval_frame_count = interval * 30
+                    frame_number = 0
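+                    # Alternate each interval between the clip without the animal and the clip with it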
+                    if motion:
+                        vid = cv2.VideoCapture(with_animal_video)
+                        motion = False
+                    else:
+                        vid = cv2.VideoCapture(no_animal_video)
+                        motion = True
+                    logging.info(f"Motion: {motion}")
+                    while (frame_number < interval_frame_count) or (not motion):
+                        frame_number += 1
+                        with tracer.start_as_current_span("sending frame") as span:
+                            try:
+                                ret, frame = vid.read()
+                                if not ret:
+                                    if motion:
+                                        vid = cv2.VideoCapture(no_animal_video)
+                                        continue
+                                    else:
+                                        logging.info(f"Motion frames count: {frame_number}")
+                                        break
+                                frame = imutils.resize(frame, width=640)
+
+                                start_time = time.time()
+
+                                current_time = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
+
+                                data = {
+                                    'frame': frame,
+                                    'capture_time': current_time
+                                }
+                                a = pickle.dumps(data)
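+                                # Length-prefixed framing: an 8-byte unsigned long long header followed by the pickled payload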
+                                message = struct.pack("Q", len(a)) + a
+                                client_socket.sendall(message)
+
+                                # Update FPS calculation
+                                fps_frame_count += 1
+                                elapsed_time = time.time() - fps_start_time
+                                if elapsed_time >= 10.0:
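+                                    # Average FPS over a ~10-second window, then reset the counters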
+                                    fps = round(fps_frame_count / elapsed_time, 2)
+                                    logging.info(f"FPS: {fps} (Total frames: {fps_frame_count}, Time: {elapsed_time})")
+                                    fps_histo.record(fps)
+                                    fps_count.set(fps)
+                                    fps_frame_count = 0
+                                    fps_start_time = time.time()
+
+                                # Maintain the frame rate
+                                end_time = time.time()
+                                elapsed_time = end_time - start_time
+                                if elapsed_time < frame_interval:
+                                    time.sleep(frame_interval - elapsed_time)  # sleep off the remainder of the frame interval
+
+                            except Exception as e:
+                                logging.error(f"Error sending frame: {e}")
+                                client_socket.close()
+                                break
+
+
+if __name__ == "__main__":
+    main()
diff --git a/services/object_recognizer/.DS_Store b/services/object_recognizer/.DS_Store
new file mode 100644
index 0000000000000000000000000000000000000000..5e0d297e84354d548129d8fc599a6905b5dd2134
GIT binary patch
literal 6148
zcmeHKO-sW-5Pe&V)C$sr2QNZ?K|%ZjvDAZSe?TR%#%gGa)@nWF?vL^JdD1sKOC(9f
zn-rOW*|%SBHv2YYvH;xZ>G}j10O-*LJ9{iPn2d|JtP^^6iO$EE;sH09A;+{e+d8&U
z0Xe%q#wd{Do~Krw-$h=eNnRuzmzU#=KK%5?8gNKPf;%!&EHNQZY6UXPdH%0H(hv)*
z8*$znaSpI1E2mHB3-acSv%&_hJ8p3M6?cVGGE%;KS$#eC_6ui;7c3i`j|S)a8Fig!
zKIM!%z2(jy>)cM(;Ie6E3YY?>z}6|ioGsSdSG3X;Fa=D3N&)#kM0CM8V5#Uo9W4A6
zfY@cXH`e8MQ8-b+IAE#B5t>sfQK>FmF`UxbABA~wz*14^aN+Xd!pbh(P@G?#@gqMS
zE>^VC6fgzq3hZceAo>6N=lg%1WGz#`6!=#PxbFCJJmQu@Zf$K&a&1JvqKnDAQn8}2
i;!`o_N-930dt-kj6=EE)RAdj$egwP>R+s{Rs=yca+h>me

literal 0
HcmV?d00001

-- 
GitLab