Skip to content

Commit

Permalink
Further improvements
Browse files Browse the repository at this point in the history
  • Loading branch information
EVDOG4LIFE authored Jun 6, 2024
1 parent 84f8e67 commit 2df1cb5
Showing 1 changed file with 85 additions and 11 deletions.
96 changes: 85 additions & 11 deletions DBSeeder.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,10 +23,56 @@
# Appwrite target identifiers -- placeholders, replace with real IDs before running.
database_id = '1234' # Your database ID here
collection_id = '1234' # Your collection ID here

# NOTE(review): appears superseded by the split write/read lists below -- confirm unused.
response_times = []
# Per-request latencies in milliseconds, one entry per completed API call.
write_response_times = []
read_response_times = []

# time.perf_counter() timestamps bounding the write and read phases,
# used later to compute TPS; 0 means "phase not started yet" (and is
# falsy, which the phase-start checks rely on).
write_start_time = 0
write_end_time = 0
read_start_time = 0
read_end_time = 0

# Categorization dictionaries: histogram buckets counting how many
# requests landed in each latency band (incremented by categorize_latency()).
write_latency_categories = {
    '<1000ms': 0,
    '1000-2000ms': 0,
    '2000-3000ms': 0,
    '3000-4000ms': 0,
    '4000-5000ms': 0,
    '>5000ms': 0
}

read_latency_categories = {
    '<1000ms': 0,
    '1000-2000ms': 0,
    '2000-3000ms': 0,
    '3000-4000ms': 0,
    '4000-5000ms': 0,
    '>5000ms': 0
}

def categorize_latency(response_time_ms, category_dict):
    """Increment the bucket of *category_dict* that covers response_time_ms.

    Buckets are 1000 ms wide from 0 up to 5000 ms; anything at or above
    5000 ms falls into the catch-all '>5000ms' bucket. Mutates
    *category_dict* in place.
    """
    # (exclusive upper bound, bucket label) in ascending order.
    bands = (
        (1000, '<1000ms'),
        (2000, '1000-2000ms'),
        (3000, '2000-3000ms'),
        (4000, '3000-4000ms'),
        (5000, '4000-5000ms'),
    )
    for upper_bound, label in bands:
        if response_time_ms < upper_bound:
            category_dict[label] += 1
            return
    # Slower than every band: count it in the overflow bucket.
    category_dict['>5000ms'] += 1

def create_user_document():
"""Create a user document with random data."""
global write_start_time
global write_end_time

if not write_start_time:
write_start_time = time.perf_counter()

name = fake.name()
email = fake.email()
age = random.randint(18, 100)
Expand All @@ -45,21 +91,31 @@ def create_user_document():
)
end_time = time.perf_counter()
response_time_ms = (end_time - start_time) * 1000
response_times.append(response_time_ms)
write_response_times.append(response_time_ms)
categorize_latency(response_time_ms, write_latency_categories)
logging.info(f"Inserted: {response['$id']} - Response Time: {response_time_ms:.2f} ms")
return response['$id'], email # Return both document ID and email
except Exception as e:
logging.error(f"Failed to insert document: {e}")
return None, None
finally:
write_end_time = time.perf_counter()

def verify_document(doc_id, expected_email):
"""Verify a single document."""
global read_start_time
global read_end_time

if not read_start_time:
read_start_time = time.perf_counter()

try:
start_time = time.perf_counter()
document = databases.get_document(database_id=database_id, collection_id=collection_id, document_id=doc_id)
end_time = time.perf_counter()
response_time_ms = (end_time - start_time) * 1000
response_times.append(response_time_ms)
read_response_times.append(response_time_ms)
categorize_latency(response_time_ms, read_latency_categories)
# Check if 'email' is in document and not None, then verify its equality to expected_email
if document and 'email' in document and document['email'] is not None and document['email'] == expected_email:
logging.info(f"Verified document: {doc_id}, email: {document.get('email')} - Response Time: {response_time_ms:.2f} ms")
Expand All @@ -71,6 +127,8 @@ def verify_document(doc_id, expected_email):
except Exception as e:
logging.error(f"Failed to verify document {doc_id}: {e}")
return False
finally:
read_end_time = time.perf_counter()

def verify_documents(document_ids):
"""Verify multiple documents concurrently."""
Expand All @@ -82,7 +140,7 @@ def verify_documents(document_ids):
verified_count += 1
return verified_count

def seed_users_parallel(count=150):
def seed_users_parallel(count=500):
"""Seed users concurrently."""
document_ids_with_email = []
with ThreadPoolExecutor() as executor:
Expand All @@ -95,13 +153,29 @@ def seed_users_parallel(count=150):
verified_count = verify_documents(document_ids_with_email)
logging.info(f"Total documents verified: {verified_count}")

# Calculate and print response time summary
if response_times:
logging.info(f"Slowest request: {max(response_times):.2f} ms")
logging.info(f"Fastest request: {min(response_times):.2f} ms")
logging.info(f"Average request time: {sum(response_times) / len(response_times):.2f} ms")
# Calculate and print response time summary for writes
if write_response_times:
logging.info(f"Write Requests - Slowest request: {max(write_response_times):.2f} ms")
logging.info(f"Write Requests - Fastest request: {min(write_response_times):.2f} ms")
logging.info(f"Write Requests - Average request time: {sum(write_response_times) / len(write_response_times):.2f} ms")
total_write_time = write_end_time - write_start_time
write_tps = len(write_response_times) / total_write_time
logging.info(f"Write Requests - Transactions per second (TPS): {write_tps:.2f}")
logging.info(f"Write Requests - Latency Categories: {write_latency_categories}")
else:
logging.info("No write response times recorded.")

# Calculate and print response time summary for reads
if read_response_times:
logging.info(f"Read Requests - Slowest request: {max(read_response_times):.2f} ms")
logging.info(f"Read Requests - Fastest request: {min(read_response_times):.2f} ms")
logging.info(f"Read Requests - Average request time: {sum(read_response_times) / len(read_response_times):.2f} ms")
total_read_time = read_end_time - read_start_time
read_tps = len(read_response_times) / total_read_time
logging.info(f"Read Requests - Transactions per second (TPS): {read_tps:.2f}")
logging.info(f"Read Requests - Latency Categories: {read_latency_categories}")
else:
logging.info("No response times recorded.")
logging.info("No read response times recorded.")

if __name__ == '__main__':
seed_users_parallel(50) # Replace with a reasonable number for testing
seed_users_parallel(1000) # Replace with a reasonable number for testing

0 comments on commit 2df1cb5

Please sign in to comment.