import numpy as np
def differential_privacy(query_result, epsilon):
    # Laplace mechanism: noise scale = sensitivity / epsilon
    sensitivity = sensitivity_of_query()  # Sensitivity of the query
    scale = sensitivity / epsilon
    # query_result is a scalar here (np.mean), so draw a single noise sample;
    # len(query_result) would raise a TypeError on a scalar
    laplace_noise = np.random.laplace(loc=0.0, scale=scale)
    # Add noise to the query result
    private_result = query_result + laplace_noise
    return private_result
def query(data):
    # Simulate a query operation, e.g. computing the mean of the data
    return np.mean(data)
def sensitivity_of_query():
    # Query sensitivity; adjust this to match the actual query and data bounds
    return 1.0
# Sample data
data = np.array([10, 12, 15, 18, 20])
# Query and apply differential privacy
query_result = query(data)
epsilon = 0.5
private_result = differential_privacy(query_result, epsilon)
# Output results
print("Query Result:", query_result)
print("Private Result with Differential Privacy:", private_result)