aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--.gitignore2
-rw-r--r--backend/app.py77
-rw-r--r--backend/server/Dockerfile (renamed from backend/Dockerfile)8
-rw-r--r--backend/server/app.py145
-rw-r--r--backend/server/requirements.txt (renamed from backend/requirements.txt)4
-rw-r--r--backend/worker/Dockerfile17
-rw-r--r--backend/worker/requirements.txt5
-rw-r--r--backend/worker/worker.py129
-rw-r--r--frontend/components.json17
-rw-r--r--frontend/package.json16
-rw-r--r--frontend/pnpm-lock.yaml1957
-rw-r--r--frontend/src/App.tsx2
-rw-r--r--frontend/src/components/Dashboard.tsx186
-rw-r--r--frontend/src/components/ImageProcessor.tsx67
-rw-r--r--frontend/src/components/ImageSideBar.tsx24
-rw-r--r--frontend/src/components/ui/button.tsx56
-rw-r--r--frontend/src/components/ui/input.tsx25
-rw-r--r--frontend/src/components/ui/select.tsx158
-rw-r--r--frontend/src/index.css77
-rw-r--r--frontend/src/lib/utils.ts6
-rw-r--r--frontend/tailwind.config.js78
-rw-r--r--frontend/tsconfig.json24
-rw-r--r--frontend/vite.config.ts11
23 files changed, 2856 insertions, 235 deletions
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..3b72af0
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,2 @@
+.env
+__pycache__
diff --git a/backend/app.py b/backend/app.py
deleted file mode 100644
index 4ed4954..0000000
--- a/backend/app.py
+++ /dev/null
@@ -1,77 +0,0 @@
-import os
-from flask import Flask, request, jsonify
-from flask_cors import CORS
-import boto3
-import cv2
-import tempfile
-import numpy as np
-
-app = Flask(__name__)
-CORS(app)
-
-# Configure AWS S3
-s3 = boto3.client('s3')
-BUCKET_NAME_ORIGINAL = "original-images-rapid-macaw"
-BUCKET_NAME_PROCESSED = "processed-images-rapid-macaw"
-
-@app.route('/upload', methods=['POST'])
-def upload_file():
- if 'image' not in request.files:
- return jsonify({'error': 'No file provided'}), 400
-
- file = request.files['image']
- operation = request.form.get('operation', 'edge_detection') # Default to edge detection
- if file and allowed_file(file.filename):
- # Save the file temporarily
- temp_file = tempfile.NamedTemporaryFile(delete=False)
- file.save(temp_file.name)
-
- # Upload to S3 original bucket
- with open(temp_file.name, "rb") as img_data:
- s3.put_object(Bucket=BUCKET_NAME_ORIGINAL, Key=file.filename, Body=img_data, ContentType="image/png")
-
- # Fetch the image from the original bucket
- original_img_obj = s3.get_object(Bucket=BUCKET_NAME_ORIGINAL, Key=file.filename)
- original_img_data = original_img_obj['Body'].read()
-
- # Process the image
- processed_image_path = process_image(original_img_data, operation)
-
- # Upload processed image to S3 processed bucket
- processed_filename = f"processed_{file.filename}"
- with open(processed_image_path, "rb") as processed_img_data:
- s3.put_object(Bucket=BUCKET_NAME_PROCESSED, Key=processed_filename, Body=processed_img_data, ContentType="image/png")
-
- # Clean up temporary files
- os.remove(temp_file.name)
- os.remove(processed_image_path)
-
- processed_file_url = f'https://{BUCKET_NAME_PROCESSED}.s3.amazonaws.com/{processed_filename}'
-
- return jsonify({'message': 'File processed and uploaded successfully', 'processed_file': processed_file_url}), 200
- else:
- return jsonify({'error': 'Invalid file type'}), 400
-
-def allowed_file(filename):
- return '.' in filename and filename.rsplit('.', 1)[1].lower() in {'png', 'jpg', 'jpeg'}
-
-def process_image(image_data, operation):
- # Convert image data to numpy array
- nparr = np.frombuffer(image_data, np.uint8)
- img = cv2.imdecode(nparr, cv2.IMREAD_COLOR)
-
- processed_img = img
-
- if operation == 'edge_detection':
- processed_img = cv2.Canny(img, 100, 200)
- elif operation == 'color_inversion':
- processed_img = cv2.bitwise_not(img)
-
- # Save processed image to a temporary path
- output_path = os.path.join(tempfile.gettempdir(), f'processed_image.png')
- cv2.imwrite(output_path, processed_img)
-
- return output_path
-
-if __name__ == '__main__':
- app.run(host='0.0.0.0', port=5000, debug=True)
diff --git a/backend/Dockerfile b/backend/server/Dockerfile
index ff49235..8601d37 100644
--- a/backend/Dockerfile
+++ b/backend/server/Dockerfile
@@ -1,7 +1,10 @@
# Use an official Python runtime as a parent image
FROM python:3.9-slim
-# Set the working directory to /app
+# Install dependencies for OpenCV using apt-get
+RUN apt-get update && apt-get install -y libgl1 libglib2.0-0 curl
+
+# Set the working directory
WORKDIR /app
# Copy the current directory contents into the container at /app
@@ -13,6 +16,9 @@ RUN pip install --no-cache-dir -r requirements.txt
# Make port 5000 available to the world outside this container
EXPOSE 5000
+# Define environment variable
+ENV FLASK_APP=app.py
+
# Run app.py when the container launches
CMD ["python", "app.py"]
diff --git a/backend/server/app.py b/backend/server/app.py
new file mode 100644
index 0000000..1b4e80e
--- /dev/null
+++ b/backend/server/app.py
@@ -0,0 +1,145 @@
+import pika
+import json
+from flask import Flask, request, jsonify
+from flask_cors import CORS
+import boto3
+import tempfile
+import os
+import base64
+import cv2
+import numpy as np
+
+app = Flask(__name__)
+CORS(app)
+
+s3 = boto3.client('s3')
+BUCKET_NAME_ORIGINAL = "original-images-allowing-griffon"
+BUCKET_NAME_PROCESSED = "processed-images-allowing-griffon"
+
+def allowed_file(filename):
+ return '.' in filename and filename.rsplit('.', 1)[1].lower() in {'png', 'jpg', 'jpeg'}
+
+def split_image(image_data, num_parts):
+ img = cv2.imdecode(np.frombuffer(image_data, np.uint8), cv2.IMREAD_COLOR)
+ height, width, _ = img.shape
+ part_height = height // num_parts
+ parts = []
+
+ for i in range(num_parts):
+ part_img = img[i * part_height: (i + 1) * part_height if i != num_parts - 1 else height, :, :]
+ _, buffer = cv2.imencode('.png', part_img)
+ part_data = buffer.tobytes()
+ parts.append(part_data)
+
+ return parts, width, height, part_height
+
+def publish_task(part_data, filename, part_num, operation, callback_queue):
+    connection = pika.BlockingConnection(pika.ConnectionParameters('<rabbit-mq-server-public-ip>'))
+ channel = connection.channel()
+ channel.queue_declare(queue='image_tasks')
+
+ task = {
+ 'part_data': base64.b64encode(part_data).decode('utf-8'),
+ 'filename': filename,
+ 'part_num': part_num,
+ 'operation': operation,
+ 'callback_queue': callback_queue
+ }
+ channel.basic_publish(exchange='', routing_key='image_tasks', body=json.dumps(task))
+ connection.close()
+ print(f"Published task for part {part_num}")
+
+def merge_parts(filename, num_parts, width, height, part_height):
+ merged_img = np.zeros((height, width, 3), dtype=np.uint8)
+
+ for i in range(num_parts):
+        part_key = f"{filename}_part_{i}"
+ part_obj = s3.get_object(Bucket=BUCKET_NAME_PROCESSED, Key=part_key)
+ part_data = part_obj['Body'].read()
+ part_img = cv2.imdecode(np.frombuffer(part_data, np.uint8), cv2.IMREAD_COLOR)
+
+ if part_img is None:
+ print(f"Failed to decode part {i}")
+ continue
+
+ start_row = i * part_height
+ end_row = (i + 1) * part_height if i != num_parts - 1 else height
+ merged_img[start_row:end_row, :, :] = part_img
+
+    merged_filename = f"processed_{filename}"
+ _, buffer = cv2.imencode('.jpg', merged_img)
+ merged_data = buffer.tobytes()
+
+ with tempfile.NamedTemporaryFile(delete=False) as temp_file:
+ temp_file.write(merged_data)
+ temp_file.seek(0) # Ensure the file pointer is at the beginning
+        s3.put_object(Bucket=BUCKET_NAME_PROCESSED, Key=merged_filename, Body=temp_file.read(), ContentType="image/jpeg")
+ os.remove(temp_file.name)
+
+ return merged_filename
+
+
+@app.route('/health', methods=['GET'])
+def health_check():
+ return jsonify({'status': 'ok'}), 200
+
+@app.route('/upload', methods=['POST'])
+def upload_file():
+ if 'image' not in request.files:
+ return jsonify({'error': 'No file provided'}), 400
+
+ file = request.files['image']
+ operation = request.form.get('operation', 'edge_detection')
+    num_parts = int(request.form.get('num_parts', 8)) # Default to 8 parts
+
+ if file and allowed_file(file.filename):
+ temp_file = tempfile.NamedTemporaryFile(delete=False)
+ file.save(temp_file.name)
+
+ with open(temp_file.name, "rb") as img_data:
+ s3.put_object(Bucket=BUCKET_NAME_ORIGINAL, Key=file.filename, Body=img_data, ContentType="image/png")
+
+ original_img_obj = s3.get_object(Bucket=BUCKET_NAME_ORIGINAL, Key=file.filename)
+ original_img_data = original_img_obj['Body'].read()
+
+ parts, width, height, part_height = split_image(original_img_data, num_parts)
+ callback_queue = f"{file.filename}_callback"
+
+ # Declare callback queue
+        connection = pika.BlockingConnection(pika.ConnectionParameters('<rabbit-mq-server-public-ip>'))
+ channel = connection.channel()
+ channel.queue_declare(queue=callback_queue)
+ connection.close()
+
+ for i, part_data in enumerate(parts):
+ publish_task(part_data, file.filename, i, operation, callback_queue)
+
+ os.remove(temp_file.name)
+
+ # Wait for completion notifications
+ def on_completion(ch, method, properties, body):
+ nonlocal num_parts_processed
+ num_parts_processed += 1
+ print(f"Part {num_parts_processed} received")
+ if num_parts_processed == num_parts:
+ merged_filename = merge_parts(file.filename, num_parts, width, height, part_height)
+ processed_file_url = f'https://{BUCKET_NAME_PROCESSED}.s3.amazonaws.com/{merged_filename}'
+ ch.stop_consuming()
+ response_queue.put(processed_file_url)
+
+ import queue
+ response_queue = queue.Queue()
+ num_parts_processed = 0
+
+        connection = pika.BlockingConnection(pika.ConnectionParameters('<rabbit-mq-server-public-ip>'))
+ channel = connection.channel()
+ channel.basic_consume(queue=callback_queue, on_message_callback=on_completion, auto_ack=True)
+ channel.start_consuming()
+
+ processed_file_url = response_queue.get()
+ return jsonify({'message': 'File processed and uploaded successfully', 'processed_file': processed_file_url}), 200
+ else:
+ return jsonify({'error': 'Invalid file type'}), 400
+
+if __name__ == '__main__':
+ app.run(host='0.0.0.0', port=5000)
diff --git a/backend/requirements.txt b/backend/server/requirements.txt
index bf3d422..eb7d129 100644
--- a/backend/requirements.txt
+++ b/backend/server/requirements.txt
@@ -1,5 +1,7 @@
flask
flask-cors
boto3
-opencv-python-headless
+opencv-python
numpy
+pika
+
diff --git a/backend/worker/Dockerfile b/backend/worker/Dockerfile
new file mode 100644
index 0000000..8752e1c
--- /dev/null
+++ b/backend/worker/Dockerfile
@@ -0,0 +1,17 @@
+# Use an official Python runtime as a parent image
+FROM python:3.9-slim
+
+# Install dependencies for OpenCV using apt-get
+RUN apt-get update && apt-get install -y libgl1 libglib2.0-0
+
+# Set the working directory
+WORKDIR /worker
+
+# Copy the current directory contents into the container at /worker
+COPY . /worker
+
+# Install any needed packages specified in requirements.txt
+RUN pip install --no-cache-dir -r requirements.txt
+
+# Run worker.py when the container launches
+CMD ["python", "worker.py"]
diff --git a/backend/worker/requirements.txt b/backend/worker/requirements.txt
new file mode 100644
index 0000000..0491cb0
--- /dev/null
+++ b/backend/worker/requirements.txt
@@ -0,0 +1,5 @@
+boto3
+opencv-python
+numpy
+pika
+
diff --git a/backend/worker/worker.py b/backend/worker/worker.py
new file mode 100644
index 0000000..9a8f32b
--- /dev/null
+++ b/backend/worker/worker.py
@@ -0,0 +1,129 @@
+import pika
+import json
+import boto3
+import cv2
+import numpy as np
+import tempfile
+import os
+import base64
+import hashlib
+import logging
+import uuid
+
+# Configure logging
+logging.basicConfig(level=logging.INFO)
+logger = logging.getLogger(__name__)
+
+s3 = boto3.client('s3')
+BUCKET_NAME_PROCESSED = "processed-images-allowing-griffon"
+
+def hash_image(image_data):
+ return hashlib.sha256(image_data).hexdigest()
+
+def process_image(part_data, operation):
+ try:
+ nparr = np.frombuffer(part_data, np.uint8)
+ img = cv2.imdecode(nparr, cv2.IMREAD_COLOR)
+
+ if img is None:
+ logger.error("Failed to decode image data.")
+ return None
+
+ if operation == 'edge_detection':
+ processed_img = cv2.Canny(img, 100, 200)
+ elif operation == 'color_inversion':
+ processed_img = cv2.bitwise_not(img)
+ elif operation == 'grayscale':
+ processed_img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
+ elif operation == 'blur':
+ processed_img = cv2.GaussianBlur(img, (15, 15), 0)
+ elif operation == 'sharpen':
+ kernel = np.array([[0, -1, 0], [-1, 5, -1], [0, -1, 0]])
+ processed_img = cv2.filter2D(img, -1, kernel)
+ elif operation == 'brightness_increase':
+ processed_img = cv2.convertScaleAbs(img, alpha=1.1, beta=30)
+ elif operation == 'contrast_increase':
+ lab = cv2.cvtColor(img, cv2.COLOR_BGR2LAB)
+ l, a, b = cv2.split(lab)
+ l = cv2.equalizeHist(l)
+ lab = cv2.merge((l, a, b))
+ processed_img = cv2.cvtColor(lab, cv2.COLOR_LAB2BGR)
+ elif operation == "sharpening":
+ kernel = np.array([[-1, -1, -1], [-1, 9, -1], [-1, -1, -1]])
+            processed_img = cv2.filter2D(img, -1, kernel)
+ else:
+ logger.error(f"Unknown operation: {operation}")
+ return None
+
+ unique_id = uuid.uuid4()
+ output_path = os.path.join(tempfile.gettempdir(), f'processed_image_part_{unique_id}.jpg')
+ success = cv2.imwrite(output_path, processed_img)
+
+ if not success:
+ logger.error("Failed to write processed image to file.")
+ return None
+
+ return output_path
+ except Exception as e:
+ logger.exception("Exception occurred during image processing.")
+ return None
+
+def compute_md5(file_path):
+ hash_md5 = hashlib.md5()
+ with open(file_path, "rb") as f:
+ for chunk in iter(lambda: f.read(4096), b""):
+ hash_md5.update(chunk)
+ return hash_md5.digest()
+
+def callback(ch, method, properties, body):
+ task = json.loads(body)
+ part_data = base64.b64decode(task['part_data'])
+ filename = task['filename']
+ part_num = task['part_num']
+ operation = task['operation']
+ callback_queue = task['callback_queue']
+
+ processed_part_path = process_image(part_data, operation)
+
+ if processed_part_path is None or not os.path.exists(processed_part_path):
+ logger.error(f"Processed file {processed_part_path} does not exist.")
+ ch.basic_ack(delivery_tag=method.delivery_tag)
+ return
+
+ try:
+        processed_filename = f"{filename}_part_{part_num}"
+ with open(processed_part_path, "rb") as processed_part_data:
+ file_content = processed_part_data.read()
+
+ md5_hash = compute_md5(processed_part_path)
+ base64_md5_hash = base64.b64encode(md5_hash).decode('utf-8')
+
+        s3.put_object(Bucket=BUCKET_NAME_PROCESSED, Key=processed_filename, Body=file_content, ContentType="image/jpeg", ContentMD5=base64_md5_hash)
+ logger.info(f"Uploaded {processed_filename} to S3 bucket {BUCKET_NAME_PROCESSED}")
+
+ if os.path.exists(processed_part_path):
+ os.remove(processed_part_path)
+ logger.info(f"Removed processed file {processed_part_path}")
+
+ # Notify completion
+        connection = pika.BlockingConnection(pika.ConnectionParameters('<rabbitmq-public-ip>'))
+ channel = connection.channel()
+ channel.basic_publish(exchange='', routing_key=callback_queue, body='Completed')
+ connection.close()
+ logger.info("Notification sent to callback queue.")
+
+ except Exception as e:
+ logger.exception("Exception occurred during S3 upload or notification.")
+
+ finally:
+ ch.basic_ack(delivery_tag=method.delivery_tag)
+
+def start_worker():
+    connection = pika.BlockingConnection(pika.ConnectionParameters('<rabbitmq-public-ip>'))
+ channel = connection.channel()
+ channel.queue_declare(queue='image_tasks')
+ channel.basic_consume(queue='image_tasks', on_message_callback=callback)
+ channel.start_consuming()
+
+if __name__ == '__main__':
+ start_worker()
diff --git a/frontend/components.json b/frontend/components.json
new file mode 100644
index 0000000..1c6facd
--- /dev/null
+++ b/frontend/components.json
@@ -0,0 +1,17 @@
+{
+ "$schema": "https://ui.shadcn.com/schema.json",
+ "style": "default",
+ "rsc": false,
+ "tsx": true,
+ "tailwind": {
+ "config": "tailwind.config.js",
+ "css": "src/index.css",
+ "baseColor": "slate",
+ "cssVariables": true,
+ "prefix": ""
+ },
+ "aliases": {
+ "components": "@/components",
+ "utils": "@/lib/utils"
+ }
+} \ No newline at end of file
diff --git a/frontend/package.json b/frontend/package.json
index de7b933..5576892 100644
--- a/frontend/package.json
+++ b/frontend/package.json
@@ -10,11 +10,23 @@
"preview": "vite preview"
},
"dependencies": {
+ "@aws-sdk/client-cognito-identity": "^3.576.0",
+ "@aws-sdk/client-eks": "^3.576.0",
+ "@aws-sdk/credential-provider-cognito-identity": "^3.576.0",
+ "@radix-ui/react-select": "^2.0.0",
+ "@radix-ui/react-slot": "^1.0.2",
+ "aws-sdk": "^2.1620.0",
"axios": "^1.6.8",
- "react": "^18.2.0",
- "react-dom": "^18.2.0"
+ "class-variance-authority": "^0.7.0",
+ "clsx": "^2.1.1",
+ "lucide-react": "^0.378.0",
+ "react": "^18.3.1",
+ "react-dom": "^18.2.0",
+ "tailwind-merge": "^2.3.0",
+ "tailwindcss-animate": "^1.0.7"
},
"devDependencies": {
+ "@types/node": "^20.12.12",
"@types/react": "^18.2.66",
"@types/react-dom": "^18.2.22",
"@typescript-eslint/eslint-plugin": "^7.2.0",
diff --git a/frontend/pnpm-lock.yaml b/frontend/pnpm-lock.yaml
index 24922b9..b356ac4 100644
--- a/frontend/pnpm-lock.yaml
+++ b/frontend/pnpm-lock.yaml
@@ -5,17 +5,53 @@ settings:
excludeLinksFromLockfile: false
dependencies:
+ '@aws-sdk/client-cognito-identity':
+ specifier: ^3.576.0
+ version: 3.576.0
+ '@aws-sdk/client-eks':
+ specifier: ^3.576.0
+ version: 3.576.0
+ '@aws-sdk/credential-provider-cognito-identity':
+ specifier: ^3.576.0
+ version: 3.576.0
+ '@radix-ui/react-select':
+ specifier: ^2.0.0
+ version: 2.0.0(@types/react-dom@18.3.0)(@types/react@18.3.1)(react-dom@18.3.1)(react@18.3.1)
+ '@radix-ui/react-slot':
+ specifier: ^1.0.2
+ version: 1.0.2(@types/react@18.3.1)(react@18.3.1)
+ aws-sdk:
+ specifier: ^2.1620.0
+ version: 2.1620.0
axios:
specifier: ^1.6.8
version: 1.6.8
+ class-variance-authority:
+ specifier: ^0.7.0
+ version: 0.7.0
+ clsx:
+ specifier: ^2.1.1
+ version: 2.1.1
+ lucide-react:
+ specifier: ^0.378.0
+ version: 0.378.0(react@18.3.1)
react:
- specifier: ^18.2.0
+ specifier: ^18.3.1
version: 18.3.1
react-dom:
specifier: ^18.2.0
version: 18.3.1(react@18.3.1)
+ tailwind-merge:
+ specifier: ^2.3.0
+ version: 2.3.0
+ tailwindcss-animate:
+ specifier: ^1.0.7
+ version: 1.0.7(tailwindcss@3.4.3)
devDependencies:
+ '@types/node':
+ specifier: ^20.12.12
+ version: 20.12.12
'@types/react':
specifier: ^18.2.66
version: 18.3.1
@@ -54,14 +90,13 @@ devDependencies:
version: 5.4.5
vite:
specifier: ^5.2.0
- version: 5.2.10
+ version: 5.2.10(@types/node@20.12.12)
packages:
/@alloc/quick-lru@5.2.0:
resolution: {integrity: sha512-UrcABB+4bUrFABwbluTIBErXwvbsU/V7TZWfmbgJfbkwiBuziS9gxdODUyuiecfdGQ85jglMW6juS3+z5TsKLw==}
engines: {node: '>=10'}
- dev: true
/@ampproject/remapping@2.3.0:
resolution: {integrity: sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw==}
@@ -71,6 +106,546 @@ packages:
'@jridgewell/trace-mapping': 0.3.25
dev: true
+ /@aws-crypto/ie11-detection@3.0.0:
+ resolution: {integrity: sha512-341lBBkiY1DfDNKai/wXM3aujNBkXR7tq1URPQDL9wi3AUbI80NR74uF1TXHMm7po1AcnFk8iu2S2IeU/+/A+Q==}
+ dependencies:
+ tslib: 1.14.1
+ dev: false
+
+ /@aws-crypto/sha256-browser@3.0.0:
+ resolution: {integrity: sha512-8VLmW2B+gjFbU5uMeqtQM6Nj0/F1bro80xQXCW6CQBWgosFWXTx77aeOF5CAIAmbOK64SdMBJdNr6J41yP5mvQ==}
+ dependencies:
+ '@aws-crypto/ie11-detection': 3.0.0
+ '@aws-crypto/sha256-js': 3.0.0
+ '@aws-crypto/supports-web-crypto': 3.0.0
+ '@aws-crypto/util': 3.0.0
+ '@aws-sdk/types': 3.575.0
+ '@aws-sdk/util-locate-window': 3.568.0
+ '@aws-sdk/util-utf8-browser': 3.259.0
+ tslib: 1.14.1
+ dev: false
+
+ /@aws-crypto/sha256-js@3.0.0:
+ resolution: {integrity: sha512-PnNN7os0+yd1XvXAy23CFOmTbMaDxgxXtTKHybrJ39Y8kGzBATgBFibWJKH6BhytLI/Zyszs87xCOBNyBig6vQ==}
+ dependencies:
+ '@aws-crypto/util': 3.0.0
+ '@aws-sdk/types': 3.575.0
+ tslib: 1.14.1
+ dev: false
+
+ /@aws-crypto/supports-web-crypto@3.0.0:
+ resolution: {integrity: sha512-06hBdMwUAb2WFTuGG73LSC0wfPu93xWwo5vL2et9eymgmu3Id5vFAHBbajVWiGhPO37qcsdCap/FqXvJGJWPIg==}
+ dependencies:
+ tslib: 1.14.1
+ dev: false
+
+ /@aws-crypto/util@3.0.0:
+ resolution: {integrity: sha512-2OJlpeJpCR48CC8r+uKVChzs9Iungj9wkZrl8Z041DWEWvyIHILYKCPNzJghKsivj+S3mLo6BVc7mBNzdxA46w==}
+ dependencies:
+ '@aws-sdk/types': 3.575.0
+ '@aws-sdk/util-utf8-browser': 3.259.0
+ tslib: 1.14.1
+ dev: false
+
+ /@aws-sdk/client-cognito-identity@3.576.0:
+ resolution: {integrity: sha512-SgfR1LLZWT1NrNOB968OKC8RAbaQUFG4V1eDjAeNjtuqC7iAlY9Ogrl824XJY4muz4ErVAga7A+Xn9QTOSSTBQ==}
+ engines: {node: '>=16.0.0'}
+ dependencies:
+ '@aws-crypto/sha256-browser': 3.0.0
+ '@aws-crypto/sha256-js': 3.0.0
+ '@aws-sdk/client-sso-oidc': 3.576.0(@aws-sdk/client-sts@3.576.0)
+ '@aws-sdk/client-sts': 3.576.0
+ '@aws-sdk/core': 3.576.0
+ '@aws-sdk/credential-provider-node': 3.576.0(@aws-sdk/client-sso-oidc@3.576.0)(@aws-sdk/client-sts@3.576.0)
+ '@aws-sdk/middleware-host-header': 3.575.0
+ '@aws-sdk/middleware-logger': 3.575.0
+ '@aws-sdk/middleware-recursion-detection': 3.575.0
+ '@aws-sdk/middleware-user-agent': 3.575.0
+ '@aws-sdk/region-config-resolver': 3.575.0
+ '@aws-sdk/types': 3.575.0
+ '@aws-sdk/util-endpoints': 3.575.0
+ '@aws-sdk/util-user-agent-browser': 3.575.0
+ '@aws-sdk/util-user-agent-node': 3.575.0
+ '@smithy/config-resolver': 3.0.0
+ '@smithy/core': 2.0.1
+ '@smithy/fetch-http-handler': 3.0.1
+ '@smithy/hash-node': 3.0.0
+ '@smithy/invalid-dependency': 3.0.0
+ '@smithy/middleware-content-length': 3.0.0
+ '@smithy/middleware-endpoint': 3.0.0
+ '@smithy/middleware-retry': 3.0.1
+ '@smithy/middleware-serde': 3.0.0
+ '@smithy/middleware-stack': 3.0.0
+ '@smithy/node-config-provider': 3.0.0
+ '@smithy/node-http-handler': 3.0.0
+ '@smithy/protocol-http': 4.0.0
+ '@smithy/smithy-client': 3.0.1
+ '@smithy/types': 3.0.0
+ '@smithy/url-parser': 3.0.0
+ '@smithy/util-base64': 3.0.0
+ '@smithy/util-body-length-browser': 3.0.0
+ '@smithy/util-body-length-node': 3.0.0
+ '@smithy/util-defaults-mode-browser': 3.0.1
+ '@smithy/util-defaults-mode-node': 3.0.1
+ '@smithy/util-endpoints': 2.0.0
+ '@smithy/util-middleware': 3.0.0
+ '@smithy/util-retry': 3.0.0
+ '@smithy/util-utf8': 3.0.0
+ tslib: 2.6.2
+ transitivePeerDependencies:
+ - aws-crt
+ dev: false
+
+ /@aws-sdk/client-eks@3.576.0:
+ resolution: {integrity: sha512-UkxIj8GtkZMYMIAAcObzOCjQxTTnvyGqqmcpGb3ZJ5ONJ0XgLRgZWo+F83eRconjrwdt2jZR6ez9bAwaKxDw3Q==}
+ engines: {node: '>=16.0.0'}
+ dependencies:
+ '@aws-crypto/sha256-browser': 3.0.0
+ '@aws-crypto/sha256-js': 3.0.0
+ '@aws-sdk/client-sso-oidc': 3.576.0(@aws-sdk/client-sts@3.576.0)
+ '@aws-sdk/client-sts': 3.576.0
+ '@aws-sdk/core': 3.576.0
+ '@aws-sdk/credential-provider-node': 3.576.0(@aws-sdk/client-sso-oidc@3.576.0)(@aws-sdk/client-sts@3.576.0)
+ '@aws-sdk/middleware-host-header': 3.575.0
+ '@aws-sdk/middleware-logger': 3.575.0
+ '@aws-sdk/middleware-recursion-detection': 3.575.0
+ '@aws-sdk/middleware-user-agent': 3.575.0
+ '@aws-sdk/region-config-resolver': 3.575.0
+ '@aws-sdk/types': 3.575.0
+ '@aws-sdk/util-endpoints': 3.575.0
+ '@aws-sdk/util-user-agent-browser': 3.575.0
+ '@aws-sdk/util-user-agent-node': 3.575.0
+ '@smithy/config-resolver': 3.0.0
+ '@smithy/core': 2.0.1
+ '@smithy/fetch-http-handler': 3.0.1
+ '@smithy/hash-node': 3.0.0
+ '@smithy/invalid-dependency': 3.0.0
+ '@smithy/middleware-content-length': 3.0.0
+ '@smithy/middleware-endpoint': 3.0.0
+ '@smithy/middleware-retry': 3.0.1
+ '@smithy/middleware-serde': 3.0.0
+ '@smithy/middleware-stack': 3.0.0
+ '@smithy/node-config-provider': 3.0.0
+ '@smithy/node-http-handler': 3.0.0
+ '@smithy/protocol-http': 4.0.0
+ '@smithy/smithy-client': 3.0.1
+ '@smithy/types': 3.0.0
+ '@smithy/url-parser': 3.0.0
+ '@smithy/util-base64': 3.0.0
+ '@smithy/util-body-length-browser': 3.0.0
+ '@smithy/util-body-length-node': 3.0.0
+ '@smithy/util-defaults-mode-browser': 3.0.1
+ '@smithy/util-defaults-mode-node': 3.0.1
+ '@smithy/util-endpoints': 2.0.0
+ '@smithy/util-middleware': 3.0.0
+ '@smithy/util-retry': 3.0.0
+ '@smithy/util-utf8': 3.0.0
+ '@smithy/util-waiter': 3.0.0
+ tslib: 2.6.2
+ uuid: 9.0.1
+ transitivePeerDependencies:
+ - aws-crt
+ dev: false
+
+ /@aws-sdk/client-sso-oidc@3.576.0(@aws-sdk/client-sts@3.576.0):
+ resolution: {integrity: sha512-6U8933O9h6iMnQDpH3OtFhS3G3FVttYZUqTpC2T0FnSSX7zgG0GnlxdQiyZh1j1aFrEB8bFw/RSmxPcMJJuSlQ==}
+ engines: {node: '>=16.0.0'}
+ dependencies:
+ '@aws-crypto/sha256-browser': 3.0.0
+ '@aws-crypto/sha256-js': 3.0.0
+ '@aws-sdk/client-sts': 3.576.0
+ '@aws-sdk/core': 3.576.0
+ '@aws-sdk/credential-provider-node': 3.576.0(@aws-sdk/client-sso-oidc@3.576.0)(@aws-sdk/client-sts@3.576.0)
+ '@aws-sdk/middleware-host-header': 3.575.0
+ '@aws-sdk/middleware-logger': 3.575.0
+ '@aws-sdk/middleware-recursion-detection': 3.575.0
+ '@aws-sdk/middleware-user-agent': 3.575.0
+ '@aws-sdk/region-config-resolver': 3.575.0
+ '@aws-sdk/types': 3.575.0
+ '@aws-sdk/util-endpoints': 3.575.0
+ '@aws-sdk/util-user-agent-browser': 3.575.0
+ '@aws-sdk/util-user-agent-node': 3.575.0
+ '@smithy/config-resolver': 3.0.0
+ '@smithy/core': 2.0.1
+ '@smithy/fetch-http-handler': 3.0.1
+ '@smithy/hash-node': 3.0.0
+ '@smithy/invalid-dependency': 3.0.0
+ '@smithy/middleware-content-length': 3.0.0
+ '@smithy/middleware-endpoint': 3.0.0
+ '@smithy/middleware-retry': 3.0.1
+ '@smithy/middleware-serde': 3.0.0
+ '@smithy/middleware-stack': 3.0.0
+ '@smithy/node-config-provider': 3.0.0
+ '@smithy/node-http-handler': 3.0.0
+ '@smithy/protocol-http': 4.0.0
+ '@smithy/smithy-client': 3.0.1
+ '@smithy/types': 3.0.0
+ '@smithy/url-parser': 3.0.0
+ '@smithy/util-base64': 3.0.0
+ '@smithy/util-body-length-browser': 3.0.0
+ '@smithy/util-body-length-node': 3.0.0
+ '@smithy/util-defaults-mode-browser': 3.0.1
+ '@smithy/util-defaults-mode-node': 3.0.1
+ '@smithy/util-endpoints': 2.0.0
+ '@smithy/util-middleware': 3.0.0
+ '@smithy/util-retry': 3.0.0
+ '@smithy/util-utf8': 3.0.0
+ tslib: 2.6.2
+ transitivePeerDependencies:
+ - '@aws-sdk/client-sts'
+ - aws-crt
+ dev: false
+
+ /@aws-sdk/client-sso@3.576.0:
+ resolution: {integrity: sha512-xbKE4bf3HYvkdrvn5kkpUdcoi3mg7uDLLkSbGaj0tzW3vNSdx9qLrCMuwfV7KrhVKWwx+lnw/2LGuCR2B5y0IA==}
+ engines: {node: '>=16.0.0'}
+ dependencies:
+ '@aws-crypto/sha256-browser': 3.0.0
+ '@aws-crypto/sha256-js': 3.0.0
+ '@aws-sdk/core': 3.576.0
+ '@aws-sdk/middleware-host-header': 3.575.0
+ '@aws-sdk/middleware-logger': 3.575.0
+ '@aws-sdk/middleware-recursion-detection': 3.575.0
+ '@aws-sdk/middleware-user-agent': 3.575.0
+ '@aws-sdk/region-config-resolver': 3.575.0
+ '@aws-sdk/types': 3.575.0
+ '@aws-sdk/util-endpoints': 3.575.0
+ '@aws-sdk/util-user-agent-browser': 3.575.0
+ '@aws-sdk/util-user-agent-node': 3.575.0
+ '@smithy/config-resolver': 3.0.0
+ '@smithy/core': 2.0.1
+ '@smithy/fetch-http-handler': 3.0.1
+ '@smithy/hash-node': 3.0.0
+ '@smithy/invalid-dependency': 3.0.0
+ '@smithy/middleware-content-length': 3.0.0
+ '@smithy/middleware-endpoint': 3.0.0
+ '@smithy/middleware-retry': 3.0.1
+ '@smithy/middleware-serde': 3.0.0
+ '@smithy/middleware-stack': 3.0.0
+ '@smithy/node-config-provider': 3.0.0
+ '@smithy/node-http-handler': 3.0.0
+ '@smithy/protocol-http': 4.0.0
+ '@smithy/smithy-client': 3.0.1
+ '@smithy/types': 3.0.0
+ '@smithy/url-parser': 3.0.0
+ '@smithy/util-base64': 3.0.0
+ '@smithy/util-body-length-browser': 3.0.0
+ '@smithy/util-body-length-node': 3.0.0
+ '@smithy/util-defaults-mode-browser': 3.0.1
+ '@smithy/util-defaults-mode-node': 3.0.1
+ '@smithy/util-endpoints': 2.0.0
+ '@smithy/util-middleware': 3.0.0
+ '@smithy/util-retry': 3.0.0
+ '@smithy/util-utf8': 3.0.0
+ tslib: 2.6.2
+ transitivePeerDependencies:
+ - aws-crt
+ dev: false
+
+ /@aws-sdk/client-sts@3.576.0:
+ resolution: {integrity: sha512-GHqqfRcUW/nGE4lpRafNKRxi4K7+SaQjYLjQnTEioUhr+w1IT/fFb3rGZYHHnN9ZCzbnrBULRC+1XOPIQWyLsw==}
+ engines: {node: '>=16.0.0'}
+ dependencies:
+ '@aws-crypto/sha256-browser': 3.0.0
+ '@aws-crypto/sha256-js': 3.0.0
+ '@aws-sdk/client-sso-oidc': 3.576.0(@aws-sdk/client-sts@3.576.0)
+ '@aws-sdk/core': 3.576.0
+ '@aws-sdk/credential-provider-node': 3.576.0(@aws-sdk/client-sso-oidc@3.576.0)(@aws-sdk/client-sts@3.576.0)
+ '@aws-sdk/middleware-host-header': 3.575.0
+ '@aws-sdk/middleware-logger': 3.575.0
+ '@aws-sdk/middleware-recursion-detection': 3.575.0
+ '@aws-sdk/middleware-user-agent': 3.575.0
+ '@aws-sdk/region-config-resolver': 3.575.0
+ '@aws-sdk/types': 3.575.0
+ '@aws-sdk/util-endpoints': 3.575.0
+ '@aws-sdk/util-user-agent-browser': 3.575.0
+ '@aws-sdk/util-user-agent-node': 3.575.0
+ '@smithy/config-resolver': 3.0.0
+ '@smithy/core': 2.0.1
+ '@smithy/fetch-http-handler': 3.0.1
+ '@smithy/hash-node': 3.0.0
+ '@smithy/invalid-dependency': 3.0.0
+ '@smithy/middleware-content-length': 3.0.0
+ '@smithy/middleware-endpoint': 3.0.0
+ '@smithy/middleware-retry': 3.0.1
+ '@smithy/middleware-serde': 3.0.0
+ '@smithy/middleware-stack': 3.0.0
+ '@smithy/node-config-provider': 3.0.0
+ '@smithy/node-http-handler': 3.0.0
+ '@smithy/protocol-http': 4.0.0
+ '@smithy/smithy-client': 3.0.1
+ '@smithy/types': 3.0.0