```python
import asyncio
import json
import random
import time
import uuid

import aioboto3

# Configuration
QUEUE_URL = 'https://sqs.ap-northeast-2.amazonaws.com/200148130345/user-event-queue'
REGION_NAME = 'ap-northeast-2'
MESSAGES_PER_BATCH = 500
DELAY_SECONDS = 1  # Wait time before sending the second batch


def generate_fake_event():
    return {
        "id": str(uuid.uuid4()),
        "timestamp": time.time(),
        "action": random.choice(["create", "update", "delete"]),
        "user": f"user{random.randint(1, 100)}"
    }


async def send_message(sqs, message):
    await sqs.send_message(
        QueueUrl=QUEUE_URL,
        MessageBody=json.dumps(message)
    )


async def send_batch(batch_number, session):
    async with session.client("sqs", region_name=REGION_NAME) as sqs:
        # Fire all sends concurrently to simulate a traffic spike
        tasks = [
            send_message(sqs, generate_fake_event())
            for _ in range(MESSAGES_PER_BATCH)
        ]
        await asyncio.gather(*tasks)
        print(f"[Batch {batch_number}] Sent {MESSAGES_PER_BATCH} messages.")


async def main():
    session = aioboto3.Session()
    await send_batch(1, session)
    await asyncio.sleep(DELAY_SECONDS)
    await send_batch(2, session)


if __name__ == "__main__":
    asyncio.run(main())
```
Install the dependency and run the producer:

```bash
pip install aioboto3
python3 spike_producer.py
```
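One note on cost: the script above issues one `SendMessage` call per event, so each 500-message spike is 500 API calls. SQS also offers `send_message_batch`, which accepts up to 10 entries per call. Below is a minimal sketch of a batched variant, not part of the original script; it reuses `QUEUE_URL` and `generate_fake_event` from `spike_producer.py`, and `send_events_batched` is a hypothetical helper name:

```python
import asyncio
import json

# Hypothetical batched variant: assumes QUEUE_URL and generate_fake_event
# from spike_producer.py are in scope.
async def send_events_batched(sqs, events):
    # SendMessageBatch takes at most 10 entries per call,
    # so 500 events become 50 API calls instead of 500.
    chunks = [events[i:i + 10] for i in range(0, len(events), 10)]
    await asyncio.gather(*[
        sqs.send_message_batch(
            QueueUrl=QUEUE_URL,
            Entries=[
                # Id only needs to be unique within a single request
                {"Id": str(n), "MessageBody": json.dumps(event)}
                for n, event in enumerate(chunk)
            ]
        )
        for chunk in chunks
    ])
```

Calling `await send_events_batched(sqs, [generate_fake_event() for _ in range(MESSAGES_PER_BATCH)])` inside `send_batch` would send the same 500 events with a tenth of the API calls.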



- We can confirm that, with 44 concurrent workers, the consumer's for loop received and processed all 1,000 messages from SQS without losing a single one (a sketch of such a consumer follows below).
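The consumer itself isn't shown in this post; here is a minimal sketch of what it might look like, assuming aioboto3 long polling with 44 independent worker tasks. `CONSUMER_CONCURRENCY`, `worker`, and the exit-on-empty-poll heuristic are illustrative choices, not from the original:

```python
import asyncio
import json

import aioboto3

QUEUE_URL = 'https://sqs.ap-northeast-2.amazonaws.com/200148130345/user-event-queue'
REGION_NAME = 'ap-northeast-2'
CONSUMER_CONCURRENCY = 44  # matches the concurrency mentioned above


async def worker(sqs):
    # Each worker long-polls independently; 44 workers give 44-way concurrency.
    while True:
        response = await sqs.receive_message(
            QueueUrl=QUEUE_URL,
            MaxNumberOfMessages=10,  # SQS per-call maximum
            WaitTimeSeconds=5        # long poll to avoid busy-waiting
        )
        messages = response.get("Messages", [])
        if not messages:
            break  # treat an empty long poll as "queue drained"
        for message in messages:
            event = json.loads(message["Body"])
            print(f"Processed {event['id']} ({event['action']})")
            # Delete only after successful processing so failures are redelivered
            await sqs.delete_message(
                QueueUrl=QUEUE_URL,
                ReceiptHandle=message["ReceiptHandle"]
            )


async def main():
    session = aioboto3.Session()
    async with session.client("sqs", region_name=REGION_NAME) as sqs:
        await asyncio.gather(*[worker(sqs) for _ in range(CONSUMER_CONCURRENCY)])


if __name__ == "__main__":
    asyncio.run(main())
```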
