AI Construction Progress Monitoring from Photo Video System

We design and deploy artificial intelligence systems: from prototype to production-ready solutions. Our team combines expertise in machine learning, data engineering and MLOps to make AI work not in the lab, but in real business.
Showing 1 of 1 services. All 1566 services
AI Construction Progress Monitoring from Photo Video System
Complex
~2-4 weeks
FAQ
AI Development Areas
AI Solution Development Stages
Latest works
  • image_website-b2b-advance_0.png
    B2B ADVANCE company website development
    1212
  • image_web-applications_feedme_466_0.webp
    Development of a web application for FEEDME
    1161
  • image_websites_belfingroup_462_0.webp
    Website development for BELFINGROUP
    852
  • image_ecommerce_furnoro_435_0.webp
    Development of an online store for the company FURNORO
    1041
  • image_logo-advance_0.png
    B2B Advance company logo design
    561
  • image_crm_enviok_479_0.webp
    Development of a web application for Enviok
    822

AI-based construction monitoring via video

Comparing actual construction progress with the project schedule is a task traditionally accomplished through an on-site inspector with a checklist. Using video from cameras or drones, an AI system automatically assesses the completion percentage of structural elements and alerts to any delays.

Tracking Progress: 4D BIM + CV

import cv2
import numpy as np
import torch
from ultralytics import YOLO
from datetime import datetime

class ConstructionProgressMonitor:
    """Detects construction stages in camera frames and compares the
    observed stage against the project schedule (4D BIM / Gantt plan).
    """

    # Canonical build sequence, earliest first; used to decide whether a
    # zone is ahead of or behind the planned stage.
    # NOTE(review): assumed ordering inferred from _determine_stage —
    # confirm against the project's actual stage hierarchy.
    STAGE_ORDER = [
        'site_preparation',
        'formwork',
        'rebar',
        'concrete_pour',
        'masonry',
        'finishing',
    ]

    def __init__(self, config: dict):
        # Detector of structural elements and work stages
        # (YOLO model fine-tuned on construction-site imagery).
        self.detector = YOLO(config['model_path'])

        # Classes: formwork, rebar, concrete_pour, brickwork,
        #          roofing, glazing, scaffolding, crane
        self.progress_classes = config['progress_classes']

        # Reference progress plan (from BIM/schedule):
        # {week_number: {zone: expected_stage}}
        self.schedule = config['schedule']

        # Mapping of camera ids to site zones.
        self.camera_zones = config['camera_zones']

    def analyze_frame(self, frame: np.ndarray,
                       camera_id: str,
                       timestamp: datetime) -> dict:
        """Run detection on a single frame and report progress vs. plan.

        Returns a dict with the camera's zone, the detected and expected
        stages, a status string from _compare_with_schedule, and the raw
        per-class detection scores.
        """
        results = self.detector(frame, conf=0.45)
        zone = self.camera_zones.get(camera_id, 'unknown')

        detected_stages = {}
        for box in results[0].boxes:
            cls = self.detector.model.names[int(box.cls)]
            conf = float(box.conf)
            x1, y1, x2, y2 = map(int, box.xyxy[0])
            area = (x2 - x1) * (y2 - y1)

            if cls in self.progress_classes:
                # Score = detector confidence weighted by the fraction
                # of the frame the detection covers; keep the best score
                # per class across all boxes.
                detected_stages[cls] = max(
                    detected_stages.get(cls, 0),
                    conf * (area / (frame.shape[0] * frame.shape[1]))
                )

        # Infer the current construction stage from the detections.
        current_stage = self._determine_stage(detected_stages)

        # Compare against the plan for this week and zone.
        week_number = self._get_week_number(timestamp)
        expected_stage = self.schedule.get(week_number, {}).get(zone)

        progress_status = self._compare_with_schedule(
            current_stage, expected_stage
        )

        return {
            'zone': zone,
            'timestamp': timestamp.isoformat(),
            'current_stage': current_stage,
            'expected_stage': expected_stage,
            'status': progress_status,
            'detected_elements': detected_stages
        }

    def _get_week_number(self, timestamp: datetime) -> int:
        """ISO calendar week number — the key used by the schedule dict."""
        return timestamp.isocalendar()[1]

    def _compare_with_schedule(self, current_stage: str,
                               expected_stage) -> str:
        """Classify the actual stage against the planned one.

        expected_stage may be None when the schedule has no entry for
        this week/zone.
        """
        if expected_stage is None:
            return 'no_schedule_data'
        if current_stage == expected_stage:
            return 'on_schedule'
        try:
            cur = self.STAGE_ORDER.index(current_stage)
            exp = self.STAGE_ORDER.index(expected_stage)
        except ValueError:
            # A stage name outside the known sequence cannot be ranked.
            return 'unknown_stage'
        return 'behind_schedule' if cur < exp else 'ahead_of_schedule'

    def _determine_stage(self, elements: dict) -> str:
        """
        Hierarchy of construction stages.
        Rebar visible without formwork -> rebar stage.
        Formwork visible -> formwork stage.
        Concrete pouring visible -> concrete pour.
        """
        if elements.get('concrete_pour', 0) > 0.3:
            return 'concrete_pour'
        if elements.get('formwork', 0) > 0.2:
            return 'formwork'
        if elements.get('rebar', 0) > 0.2:
            return 'rebar'
        if elements.get('brickwork', 0) > 0.3:
            return 'masonry'
        if elements.get('glazing', 0) > 0.2:
            return 'finishing'
        return 'site_preparation'

Drones for progress monitoring: automated flybys

class DroneProgressSurvey:
    """Weekly drone-survey pipeline: stitches flight photos into an
    orthophoto mosaic and measures construction progress as the pixel
    change against a stored baseline mosaic.
    """

    def __init__(self, flight_plan_path: str):
        self.waypoints = self._load_flight_plan(flight_plan_path)
        self.ortho_processor = OrthoPhotoProcessor()

    def generate_weekly_report(self,
                                current_photos: list[np.ndarray],
                                baseline_photos: list[np.ndarray],
                                gps_tags: list[dict]) -> dict:
        """Compare this week's orthophoto with the stored baseline.

        Pixel-level differences between the two mosaics are interpreted
        as construction progress.
        """
        # Stitch overlapping shots into a single orthophoto map.
        ortho_now = self.ortho_processor.stitch(current_photos, gps_tags)
        ortho_before = self.ortho_processor.load_baseline()

        # Pixel-wise change detection between the two mosaics.
        change_mask = self._detect_changes(ortho_now, ortho_before)

        # Convert changed pixels into square metres using GPS scale.
        area_changed = self._pixels_to_sqm(change_mask, gps_tags)

        return {
            'week': datetime.now().isocalendar()[1],
            'changed_area_m2': area_changed,
            'diff_map': change_mask,
            'progress_percent': self._estimate_progress(change_mask)
        }

    def _detect_changes(self, current: np.ndarray,
                         baseline: np.ndarray) -> np.ndarray:
        """SSIM-based change detection between two orthophotos."""
        from skimage.metrics import structural_similarity as ssim

        # Work in grayscale — SSIM here compares single-channel images.
        gray_now = cv2.cvtColor(current, cv2.COLOR_BGR2GRAY)
        gray_base = cv2.cvtColor(baseline, cv2.COLOR_BGR2GRAY)

        # Match mosaic sizes before comparison (cv2.resize wants (w, h)).
        if gray_now.shape != gray_base.shape:
            gray_base = cv2.resize(gray_base, gray_now.shape[::-1])

        # full=True yields a per-pixel similarity map; invert it so that
        # high values mark change, then scale to 8-bit.
        _score, similarity = ssim(gray_now, gray_base, full=True)
        change_strength = ((1 - similarity) * 255).astype(np.uint8)

        # Binarize: keep only significant changes.
        _, binary_mask = cv2.threshold(
            change_strength, 30, 255, cv2.THRESH_BINARY
        )
        return binary_mask

Integration with Procore, Autodesk Construction Cloud

Monitoring results are integrated into construction platforms via API:

  • Procore: automatic observations of identified delays
  • Autodesk BIM 360: 4D Model Progress Update
  • MS Project / Primavera P6: actual vs. plan in the form of task updates

Case: Shopping center, 45,000 m², 18 months of construction

Weekly drone flights (DJI Phantom 4 RTK), 350–400 photos per flight. AI-based progress analysis by zone.

Over a 12-week period, the system recorded a delay in Section D (monolithic work): expected progress was 65%, while actual progress, based on video analysis, was 48%. This allowed the schedule to be adjusted three weeks before the critical deadline by redistributing crews.

Project type — Timeline
Basic building element detector 4–6 weeks
Progress monitoring system (cameras + reports) 8–12 weeks
Full platform with drone + BIM integration 14–22 weeks