import React, { useEffect, useRef } from 'react'; import { Crosshair, Navigation, Target, Activity, Zap, Shield, Eye, Map, Video, Cpu, Wifi, Battery, ChevronRight, Save, XSquare, AlertCircle, ChevronLeft, LocateFixed, Fingerprint } from 'lucide-react';

/**
 * SarasScope3D — renders an interactive, procedurally built 3D model of the
 * SARAS weapon-sight using Three.js. Three.js is loaded lazily from a CDN and
 * accessed through `window.THREE` (it is not an npm dependency here). The
 * model auto-rotates while idle and can be rotated by mouse or touch drag.
 *
 * NOTE(review): the JSX returned by this component continues beyond this
 * chunk; only the scene-setup logic is visible here.
 */
const SarasScope3D = () => {
  // DOM node the WebGL canvas gets appended into.
  const mountRef = useRef(null);

  useEffect(() => {
    // Scene state lives in the effect closure; `animationId` is read by the
    // cleanup function below to stop the render loop.
    let scene, camera, renderer, animationId, group;

    // Builds the entire scene. Bails out early if the mount node is gone or
    // the CDN script has not populated `window.THREE` yet.
    const initThree = () => {
      if (!mountRef.current || !window.THREE) return;
      const THREE = window.THREE;
      // Fallback sizes guard against a zero-sized mount during first layout.
      const width = mountRef.current.clientWidth || 500;
      const height = mountRef.current.clientHeight || 400;

      scene = new THREE.Scene();
      camera = new THREE.PerspectiveCamera(45, width / height, 0.1, 1000);
      camera.position.set(0, 5, 20);

      renderer = new THREE.WebGLRenderer({ antialias: true, alpha: true });
      renderer.setSize(width, height);
      renderer.setPixelRatio(window.devicePixelRatio);
      // Replace any canvas left over from a previous init (e.g. StrictMode
      // double-invocation or a hot reload) before appending the new one.
      if (mountRef.current.firstChild) {
        mountRef.current.removeChild(mountRef.current.firstChild);
      }
      mountRef.current.appendChild(renderer.domElement);

      // Lighting Setup
      const ambientLight = new THREE.AmbientLight(0xffffff, 0.6);
      scene.add(ambientLight);
      const dirLight = new THREE.DirectionalLight(0xffffff, 0.8);
      dirLight.position.set(10, 20, 10);
      scene.add(dirLight);
      // Cyan accent light for a "tech" rim-light effect.
      const blueLight = new THREE.PointLight(0x06b6d4, 2, 50);
      blueLight.position.set(-5, 0, 5);
      scene.add(blueLight);

      // All meshes go into one group so drag rotation moves the whole model.
      group = new THREE.Group();

      // Procedural Model Materials
      const bodyMat = new THREE.MeshStandardMaterial({ color: 0x1e293b, roughness: 0.7, metalness: 0.3 });
      const metalMat = new THREE.MeshStandardMaterial({ color: 0x475569, roughness: 0.4, metalness: 0.8 });
      const lensMat = new THREE.MeshStandardMaterial({ color: 0x082f49, roughness: 0.1, metalness: 0.9, transparent: true, opacity: 0.9 });

      // Main Scope Body — cylinders are authored along Y, so each one is
      // rotated 90° about Z to lie along the X axis.
      const bodyGeo = new THREE.CylinderGeometry(1.5, 1.5, 8, 32);
      bodyGeo.rotateZ(Math.PI / 2);
      const body = new THREE.Mesh(bodyGeo, bodyMat);
      group.add(body);

      // Front Lens Hood
      const frontHoodGeo = new THREE.CylinderGeometry(1.8, 1.5, 2, 32);
      frontHoodGeo.rotateZ(Math.PI / 2);
      const frontHood = new THREE.Mesh(frontHoodGeo, bodyMat);
      frontHood.position.set(-5, 0, 0);
      group.add(frontHood);

      // Front Glass Lens
      const lensGeo = new THREE.CylinderGeometry(1.6, 1.6, 0.2, 32);
      lensGeo.rotateZ(Math.PI / 2);
      const frontLens = new THREE.Mesh(lensGeo, lensMat);
      frontLens.position.set(-5.9, 0, 0);
      group.add(frontLens);

      // Eyepiece
      const eyePieceGeo = new THREE.CylinderGeometry(1.2, 1.5, 1.5, 32);
      eyePieceGeo.rotateZ(Math.PI / 2);
      const eyePiece = new THREE.Mesh(eyePieceGeo, metalMat);
      eyePiece.position.set(4.75, 0, 0);
      group.add(eyePiece);

      // Top Sensor/Compute Module
      const topBoxGeo = new THREE.BoxGeometry(4, 1.2, 2.5);
      const topBox = new THREE.Mesh(topBoxGeo, bodyMat);
      topBox.position.set(-1, 1.8, 0);
      group.add(topBox);

      // Top Module Camera/Sensor Lens
      const sensorGeo = new THREE.CylinderGeometry(0.5, 0.5, 0.5, 16);
      sensorGeo.rotateZ(Math.PI / 2);
      const sensor = new THREE.Mesh(sensorGeo, lensMat);
      sensor.position.set(-3, 1.8, 0);
      group.add(sensor);

      // Rail Mount Base
      const mountGeo = new THREE.BoxGeometry(6, 1, 1.5);
      const mountMesh = new THREE.Mesh(mountGeo, metalMat);
      mountMesh.position.set(0, -1.8, 0);
      group.add(mountMesh);

      // Picatinny Rail Interface Simulator
      const railGeo = new THREE.BoxGeometry(6, 0.2, 2);
      const railMesh = new THREE.Mesh(railGeo, bodyMat);
      railMesh.position.set(0, -2.4, 0);
      group.add(railMesh);

      scene.add(group);

      // Initial isometric angle
      group.rotation.y = -Math.PI / 6;
      group.rotation.x = Math.PI / 12;

      // Mouse/Touch Interaction logic
      let isDragging = false;
      let previousMousePosition = { x: 0, y: 0 };
      // NOTE(review): mousedown does not seed previousMousePosition, but
      // mousemove updates it unconditionally, so the pointer position is
      // already current by the time a drag starts.
      const onMouseDown = (e) => {
        isDragging = true;
      };
      const onMouseUp = () => {
        isDragging = false;
      };
      const onMouseMove = (e) => {
        if (isDragging && group) {
          const deltaMove = { x: e.offsetX - previousMousePosition.x, y: e.offsetY - previousMousePosition.y };
          // 0.01 rad per pixel of drag.
          group.rotation.y += deltaMove.x * 0.01;
          group.rotation.x += deltaMove.y * 0.01;
        }
        previousMousePosition = { x: e.offsetX, y: e.offsetY };
      };
      const onTouchStart = (e) => {
        isDragging = true;
        previousMousePosition = { x: e.touches[0].clientX, y: e.touches[0].clientY };
      };
      const onTouchEnd = () => {
        isDragging = false;
      };
      const onTouchMove = (e) => {
        if (isDragging && group) {
          const deltaMove = { x: e.touches[0].clientX - previousMousePosition.x, y: e.touches[0].clientY - previousMousePosition.y };
          group.rotation.y += deltaMove.x * 0.01;
          group.rotation.x += deltaMove.y * 0.01;
        }
        previousMousePosition = { x: e.touches[0].clientX, y: e.touches[0].clientY };
      };

      const canvas = renderer.domElement;
      canvas.addEventListener('mousedown', onMouseDown);
      // mouseup/touchend are bound on window so a drag released outside the
      // canvas still ends the drag.
      window.addEventListener('mouseup', onMouseUp);
      canvas.addEventListener('mousemove', onMouseMove);
      canvas.addEventListener('touchstart', onTouchStart, {passive: true});
      window.addEventListener('touchend', onTouchEnd);
      canvas.addEventListener('touchmove', onTouchMove, {passive: true});

      // Render Loop
      const animate = () => {
        animationId = requestAnimationFrame(animate);
        if (!isDragging) {
          group.rotation.y += 0.003; // Auto rotate slowly when not interacting
        }
        renderer.render(scene, camera);
      };
      animate();

      // Keep the canvas and camera aspect in sync with the mount's size.
      const handleResize = () => {
        if (!mountRef.current) return;
        const w = mountRef.current.clientWidth;
        const h = mountRef.current.clientHeight;
        renderer.setSize(w, h);
        camera.aspect = w / h;
        camera.updateProjectionMatrix();
      };
      // FIXME: this 'resize' listener is never removed on unmount.
      window.addEventListener('resize', handleResize);
    };

    // Load Three.js dynamically
    if (!window.THREE) {
      const script = document.createElement('script');
      script.src = 'https://cdnjs.cloudflare.com/ajax/libs/three.js/r128/three.min.js';
      script.onload = initThree;
      document.head.appendChild(script);
    } else {
      initThree();
    }

    return () => {
      if (animationId) cancelAnimationFrame(animationId);
      // FIXME: removeEventListener is called with freshly created arrow
      // functions, which never match the originally registered handlers, so
      // these two calls are no-ops — the window 'mouseup'/'touchend'
      // listeners leak on unmount. The canvas listeners, the 'resize'
      // handler, and the WebGL renderer (renderer.dispose()) are not cleaned
      // up either. The handlers would need to be hoisted out of initThree
      // (or collected in a disposer list) to be removable here.
      window.removeEventListener('mouseup', () => {});
      window.removeEventListener('touchend', () => {});
    };
  }, []);

  return (
Smart Assault Rifle Augmentation System
Empowering the Indian Infantry for Multi-Domain Operations.
The Smart Assault Rifle Augmentation System (SARAS) is a unique computer platform for assault weapons designed for the future infantry soldier.
SARAS provides a wide range of advanced capabilities for enhanced combat lethality, soldier survivability and improved situational awareness, connecting to various applications, reconnaissance networks and combat management systems.
SARAS transforms any assault rifle into a smart weapon. Adapted to fit existing service rifles such as the AK-203 and SIG 716, it offers two distinct configurations:
System includes a built-in weapon sight.
Enables seamless use of any existing weapon sight as needed.
GHATAK PLATOON IN COVER
URBAN CI OPERATION
DIGITAL BARREL FEED
SARAS is designed to relay data through the weapon sight. The images from the optic system are sent directly to the eyepiece and displayed on a cutting-edge user interface with unique digital display technologies.
Provides the soldier with new shooting options - such as shooting out of position or around corners. This is critical for urban warfare and trench defenses along the Line of Control (LoC).
SARAS supports the development of applications in an open architecture environment, enabling integration of additional assault weapon solutions incorporating advanced sensors, artificial intelligence (AI), and augmented reality (AR).
| Feature | 17μ SARAS | 12μ SARAS (Advanced) |
|---|---|---|
| Thermal Sensor | Uncooled 640x480 17μ | Uncooled 640x480 12μ |
| HFOV | 27° | 27° |
| Detection (Human) | 580m | 580m |
| Recognition (Human) | 250m | 250m |
| Identification (Human) | 120m | 120m |
| Display Type | 800x600 OLED | 800x600 OLED |
| Total Weight (inc. batt) | 1000g | 850g |