#!/usr/bin/env python3

import numpy as np
from sklearn.linear_model import LinearRegression
import matplotlib.pyplot as plt

# sample data: multiple measurements per distance
# distances in meters
distances = np.array([5.0, 10.0, 15.0, 18.5, 23.5])

# measured path loss in dB for each distance (rows: distances, columns: measurements)
# here: 5 measurements per distance

# i got these using scott's get_data script. thanks scott
pinst10 = [14 + 22.17792627372312,
           14 + 26.5983685883656,
           14 + 22.797848894157955,
           14 + 23.014822745152888,
           14 + 21.45208671888342]

pinst15 = [14 + 25.20854283824081,
           14 + 25.196382621588043,
           14 + 27.146880284259304,
           14 + 23.50194049443653,
           14 + 26.735523841588087]

pinst185 = [14 + 26.391715760893703,
            14 + 28.1335104958795,
            14 + 29.153177713518723,
            14 + 29.847186387317596,
            14 + 30.558908425543827]

pinst235 = [14 + 31.755831065931034,
            14 + 31.883960187854985,
            14 + 32.063876077498804,
            14 + 31.643081143951544,
            14 + 32.45807181713604]

pinst5 = [14 + 19.567076092915567,
          14 + -20.05316841156921,
          14 + 18.774664683716043,
          14 + 20.252929968934183,
          14 + 19.01518926375179]

# oh god
# stack the per-distance lists into a (5 distances x 5 measurements) array;
# rows are ordered to match `distances`
measured_PL = np.array([pinst5, pinst10, pinst15, pinst185, pinst235])

# reference distance (meters)
d0 = 5
PL_d0 = np.mean(measured_PL[0])
PL_d1 = np.mean(measured_PL[1])
PL_d2 = np.mean(measured_PL[2])
PL_d3 = np.mean(measured_PL[3])
PL_d4 = np.mean(measured_PL[4])

print(f"Reference distance: {d0} m")
print(f"Mean path loss at reference distance: about {PL_d0:.4f} dB")

for j, pl in enumerate([PL_d0, PL_d1, PL_d2, PL_d3, PL_d4], start=1):
    print(f"Computed mean path loss #{j}: {pl:.4f} dB")
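
# the regression below fits the standard log-distance path loss model,
#   PL(d) = PL(d0) + 10 * n * log10(d / d0) + X_sigma,
# where n is the path-loss exponent and X_sigma is zero-mean shadowing in dB.
# with X = 10*log10(d/d0) and Y = PL(d) - PL(d0), the slope of Y against X is n,
# and the spread of the residuals estimates the shadowing standard deviation.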
# prepare data for linear regression
# compute the mean PL at each distance
mean_PL = np.mean(measured_PL, axis=1)
X = 10 * np.log10(distances / d0).reshape(-1, 1)
Y = mean_PL - PL_d0  # path loss relative to the reference distance

# fit linear regression
model = LinearRegression()
model.fit(X, Y)

# extract path-loss exponent (slope)
n = model.coef_[0]
print(f'Estimated path-loss exponent n: {n:.2f}')

# predicted path loss from the model
predicted_PL = PL_d0 + model.predict(X)
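# note: model.predict(X) also includes the fitted intercept, so the fitted curve
# is PL_d0 + intercept + n * 10*log10(d/d0), not PL_d0 + n * 10*log10(d/d0) alone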

# calculate the shadowing standard deviation using all measurements
residuals = measured_PL - predicted_PL[:, np.newaxis]  # per-measurement residuals
sigma = np.std(residuals)
print(f'Shadowing standard deviation: {sigma:.2f} dB')

# plot
plt.figure(figsize=(8, 6))
plt.scatter(np.repeat(X, measured_PL.shape[1]), measured_PL.flatten(),
            label='Measured PL')
plt.plot(X, predicted_PL, color='red', label='Fitted line')
plt.xlabel('10*log10(d/d0)')
plt.ylabel('Path Loss (dB)')
plt.title('Path Loss Experiment & Shadowing Estimation')
plt.legend()
plt.grid(True)
plt.show()
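
# a small usage sketch (hypothetical example, not part of the measurements above):
# predict the expected path loss at a new distance with the fitted model
d_new = 30.0  # meters, chosen arbitrarily for illustration
x_new = 10 * np.log10(np.array([[d_new / d0]]))
PL_new = PL_d0 + model.predict(x_new)[0]
print(f'Predicted mean path loss at {d_new} m: {PL_new:.2f} dB '
      f'(shadowing std. dev. about {sigma:.2f} dB)')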