Jeffery Russell 5 years ago
parent
commit
93db9a57da
14 changed files with 6389 additions and 3 deletions
  1. +25
    -0
      data_exploration/wellness_cluster_test.py
  2. +1
    -1
      data_preparation/cleaned/expSmoothWorkAndFatigueData.csv
  3. +5012
    -0
      data_preparation/cleaned/time_series_normalized_wellness_menstruation.csv
  4. +1
    -1
      data_preparation/cleaned/time_series_notnormalized_with_0Nan_rpe.csv
  5. +1
    -1
      data_preparation/time_series.py
  6. +13
    -0
      findings/data_plot.py
  7. +0
    -0
      findings/r2correlation.txt
  8. +336
    -0
      hypotheses_modeling/21DaySlidingWorkAverage.csv
  9. +188
    -0
      hypotheses_modeling/KerasRegressions.py
  10. +355
    -0
      hypotheses_modeling/fatigue_total_sum.csv
  11. +28
    -0
      hypotheses_modeling/hypotheses.txt
  12. +350
    -0
      hypotheses_modeling/slidingWorkAverageSevenDay.csv
  13. +61
    -0
      hypotheses_modeling/team_regressions.py
  14. +18
    -0
      hypotheses_modeling/time_series_days_ranked.csv

+ 25
- 0
data_exploration/wellness_cluster_test.py View File

@ -0,0 +1,25 @@
import sklearn.cluster as cluster
import pandas as pd
from matplotlib import pyplot as plt
def find_kmeans(mat, k, first, second):
    """Cluster the rows of ``mat`` into ``k`` groups and scatter-plot the result.

    The first two columns of ``mat`` are used as the plot axes; ``first`` and
    ``second`` are the metric names shown as the x/y axis labels.  Points are
    colored by their assigned cluster label.
    """
    model = cluster.KMeans(n_clusters=k)
    model.fit(mat)
    # Scatter the first two feature columns, colored by cluster assignment.
    plt.figure(figsize=(6, 6))
    plt.xlabel('Metric: ' + first)
    plt.ylabel('Metric: ' + second)
    plt.scatter(mat[:, 0], mat[:, 1], c=model.labels_, cmap='rainbow')
    plt.show()
# Load the normalized wellness time series.
data = pd.read_csv('../data_preparation/cleaned/time_series_normalized_wellness_menstruation.csv')
# Keep only the two metrics we want to cluster on.
metrics = ["normFatigue", "normSleepQuality"]
data = data[metrics]
# Arguments: values, number of clusters, then the two axis labels.
find_kmeans(data.values, 2, metrics[0], metrics[1])

+ 1
- 1
data_preparation/cleaned/expSmoothWorkAndFatigueData.csv View File

@ -1,4 +1,4 @@
"","day","totalWork","averageWorkLoad","smoothedWork","smoothedFatigueData"
"","TimeSinceAugFirst","totalWork","averageWorkLoad","smoothedWork","smoothedFatigueData"
"1",0,4855,255.526315789474,4855,0.734457489003587 "1",0,4855,255.526315789474,4855,0.734457489003587
"2",1,1380,138,5921.8864123285,1.17226558337237 "2",1,1380,138,5921.8864123285,1.17226558337237
"3",2,6030,231.923076923077,11569.9661032971,1.37713723445689 "3",2,6030,231.923076923077,11569.9661032971,1.37713723445689

+ 5012
- 0
data_preparation/cleaned/time_series_normalized_wellness_menstruation.csv
File diff suppressed because it is too large
View File


+ 1
- 1
data_preparation/cleaned/time_series_notnormalized_with_0Nan_rpe.csv View File

@ -36,8 +36,8 @@
34,34,34,2018-07-17,4,1,Skills,60.0,5.0,300.0,300.0,107.1,309.11,0.35,7.0,8.0,,0,0,1,0,0,0,0,0,0,0,0,1,350 34,34,34,2018-07-17,4,1,Skills,60.0,5.0,300.0,300.0,107.1,309.11,0.35,7.0,8.0,,0,0,1,0,0,0,0,0,0,0,0,1,350
35,35,35,2018-07-17,5,1,Skills,60.0,5.0,300.0,300.0,42.9,100.71,0.43,9.0,9.0,,0,0,1,0,0,0,0,0,0,0,0,1,350 35,35,35,2018-07-17,5,1,Skills,60.0,5.0,300.0,300.0,42.9,100.71,0.43,9.0,9.0,,0,0,1,0,0,0,0,0,0,0,0,1,350
36,36,36,2018-07-17,6,1,Speed,30.0,3.0,90.0,370.0,52.9,333.5,0.16,0.0,0.0,,0,0,0,0,0,0,1,0,0,0,0,1,350 36,36,36,2018-07-17,6,1,Speed,30.0,3.0,90.0,370.0,52.9,333.5,0.16,0.0,0.0,,0,0,0,0,0,0,1,0,0,0,0,1,350
37,37,37,2018-07-17,6,1,Conditioning,35.0,8.0,280.0,0.0,0.0,0.0,0.0,0.0,0.0,,0,0,0,1,0,0,0,0,0,0,0,1,350
38,38,38,2018-07-17,7,1,Skills,75.0,6.0,450.0,450.0,205.7,401.79,0.51,0.0,0.0,,0,0,1,0,0,0,0,0,0,0,0,1,350 38,38,38,2018-07-17,7,1,Skills,75.0,6.0,450.0,450.0,205.7,401.79,0.51,0.0,0.0,,0,0,1,0,0,0,0,0,0,0,0,1,350
37,37,37,2018-07-17,6,1,Conditioning,35.0,8.0,280.0,0.0,0.0,0.0,0.0,0.0,0.0,,0,0,0,1,0,0,0,0,0,0,0,1,350
39,39,39,2018-07-17,10,1,Skills,60.0,4.0,240.0,240.0,79.3,298.57,0.27,0.0,0.0,,0,0,1,0,0,0,0,0,0,0,0,1,350 39,39,39,2018-07-17,10,1,Skills,60.0,4.0,240.0,240.0,79.3,298.57,0.27,0.0,0.0,,0,0,1,0,0,0,0,0,0,0,0,1,350
40,40,40,2018-07-17,11,1,Skills,90.0,5.0,450.0,450.0,210.0,391.36,0.54,9.0,9.0,Absolutely,0,0,1,0,0,0,0,0,0,1,0,0,350 40,40,40,2018-07-17,11,1,Skills,90.0,5.0,450.0,450.0,210.0,391.36,0.54,9.0,9.0,Absolutely,0,0,1,0,0,0,0,0,0,1,0,0,350
41,41,41,2018-07-17,13,1,Skills,90.0,6.0,540.0,540.0,268.6,309.25,0.87,0.0,0.0,Not at all,0,0,1,0,0,0,0,0,1,0,0,0,350 41,41,41,2018-07-17,13,1,Skills,90.0,6.0,540.0,540.0,268.6,309.25,0.87,0.0,0.0,Not at all,0,0,1,0,0,0,0,0,1,0,0,0,350

+ 1
- 1
data_preparation/time_series.py View File

@ -30,4 +30,4 @@ def normalize_time_series(path, filename, start):
start = start_end_times("data/rpe.csv") start = start_end_times("data/rpe.csv")
normalize_time_series("data/games_ranked.csv", "games_ranked.csv", start)
normalize_time_series("cleaned/normalizedWellness.csv", "normalizedWellness.csv", start)

+ 13
- 0
findings/data_plot.py View File

@ -0,0 +1,13 @@
from matplotlib import pyplot as plt
import pandas as pd
def plot_xy(x, y):
    """Show a scatter plot of ``y`` against ``x``.

    Axis labels are taken from each series' ``name`` attribute, so the
    arguments are expected to be pandas Series (or anything with ``.name``).
    """
    plt.xlabel(x.name)
    plt.ylabel(y.name)
    plt.scatter(x, y)
    plt.show()
# Load the raw wellness survey and visualize fatigue against hours slept.
frame = pd.read_csv('../data_preparation/data/wellness.csv')
fatigue, sleep_hours = frame['Fatigue'], frame['SleepHours']
plot_xy(fatigue, sleep_hours)

+ 0
- 0
findings/r2correlation.txt View File


+ 336
- 0
hypotheses_modeling/21DaySlidingWorkAverage.csv View File

@ -0,0 +1,336 @@
"","TimeSinceAugFirst","slidingWorkAverage"
"1",20,5563.95
"2",21,5895.66666666667
"3",22,6307.42857142857
"4",23,6331.71428571429
"5",24,6458.52380952381
"6",25,6752.33333333333
"7",26,6767.33333333333
"8",27,6766.61904761905
"9",28,6738.42857142857
"10",29,6798.66666666667
"11",30,6811.38095238095
"12",31,6813.7619047619
"13",32,6881.85714285714
"14",33,6840.42857142857
"15",34,6834.71428571429
"16",35,6788.47619047619
"17",36,6711.09523809524
"18",37,6720.61904761905
"19",38,6781.33333333333
"20",39,6495.04761904762
"21",40,6617.90476190476
"22",41,6578.85714285714
"23",42,6122.66666666667
"24",43,5613.04761904762
"25",44,5671
"26",45,5383.2380952381
"27",46,4878.52380952381
"28",47,4872.90476190476
"29",48,4874.33333333333
"30",49,4910.85714285714
"31",50,4796.80952380952
"32",51,4867.42857142857
"33",52,5067.90476190476
"34",53,5234.04761904762
"35",54,5216.19047619048
"36",55,5235.47619047619
"37",56,5313.09523809524
"38",57,5416.90476190476
"39",58,5454.90476190476
"40",59,5578.47619047619
"41",60,5850.80952380952
"42",61,5744.38095238095
"43",62,5768.66666666667
"44",63,6210.71428571429
"45",64,6592.95238095238
"46",65,6613.90476190476
"47",66,6986.7619047619
"48",67,7557.09523809524
"49",68,7567.71428571429
"50",69,7583.42857142857
"51",70,7640.33333333333
"52",71,7712.38095238095
"53",72,7741.19047619048
"54",73,7612.85714285714
"55",74,7466
"56",75,7487.42857142857
"57",76,7480.28571428571
"58",77,7064.61904761905
"59",78,6618.42857142857
"60",79,6647.38095238095
"61",80,6575.95238095238
"62",81,5935.85714285714
"63",82,6326.33333333333
"64",83,7005.38095238095
"65",84,6509.52380952381
"66",85,6314.66666666667
"67",86,6331
"68",87,5901.95238095238
"69",88,5370.85714285714
"70",89,5633.80952380952
"71",90,5935.52380952381
"72",91,5317.66666666667
"73",92,4865.38095238095
"74",93,4682.7619047619
"75",94,4640.61904761905
"76",95,4585.61904761905
"77",96,4604.66666666667
"78",97,4592.52380952381
"79",98,5018.95238095238
"80",99,5509.19047619048
"81",100,5545.38095238095
"82",101,5570.52380952381
"83",102,6642.42857142857
"84",103,6281.95238095238
"85",104,5579.19047619048
"86",105,6177.7619047619
"87",106,6478.2380952381
"88",107,6441.33333333333
"89",108,7018.95238095238
"90",109,7771.09523809524
"91",110,7487.42857142857
"92",111,7168.57142857143
"93",112,7570.95238095238
"94",113,7856.38095238095
"95",114,7973.7619047619
"96",115,7617.33333333333
"97",116,7176.85714285714
"98",117,7260.66666666667
"99",118,7479.2380952381
"100",119,7162.33333333333
"101",120,6722.57142857143
"102",121,6734.71428571429
"103",122,6630
"104",123,6050.57142857143
"105",124,6004.14285714286
"106",125,6003.57142857143
"107",126,5477.38095238095
"108",127,4982.38095238095
"109",128,4995.71428571429
"110",129,4822.14285714286
"111",130,4590
"112",131,4634.28571428571
"113",132,4634.28571428571
"114",133,4787.61904761905
"115",134,4950.19047619048
"116",135,5018.52380952381
"117",136,5628.90476190476
"118",137,6380.04761904762
"119",138,6255.7619047619
"120",139,6037.19047619048
"121",140,6435.90476190476
"122",141,6408.04761904762
"123",142,6788.90476190476
"124",143,6869.19047619048
"125",144,6356.95238095238
"126",145,6463.14285714286
"127",146,6653.61904761905
"128",147,6511.71428571429
"129",148,6512.95238095238
"130",149,6397.71428571429
"131",150,6617.2380952381
"132",151,6094.85714285714
"133",152,6766.28571428571
"134",153,6873.19047619048
"135",154,6273.19047619048
"136",155,6500.61904761905
"137",156,6401.80952380952
"138",157,6452.2380952381
"139",158,6449.42857142857
"140",159,6518
"141",160,6518
"142",161,6399.7619047619
"143",162,6653.19047619048
"144",163,6260.19047619048
"145",164,6179.2380952381
"146",165,6804.2380952381
"147",166,6751.14285714286
"148",167,6560.66666666667
"149",168,6990.90476190476
"150",169,6975.14285714286
"151",170,6970.14285714286
"152",171,6734.80952380952
"153",172,6583.04761904762
"154",173,6174.2380952381
"155",174,6067.33333333333
"156",175,6419.2380952381
"157",176,6005.47619047619
"158",177,5965.57142857143
"159",178,5539.04761904762
"160",179,5201.80952380952
"161",180,5306
"162",181,5357.33333333333
"163",182,4840.19047619048
"164",183,4516.04761904762
"165",184,4437.38095238095
"166",185,4148.19047619048
"167",186,3595.90476190476
"168",187,3589
"169",188,3614.71428571429
"170",189,3615.90476190476
"171",190,4194.09523809524
"172",191,4347.19047619048
"173",192,4576.33333333333
"174",193,5244.28571428571
"175",194,4937.38095238095
"176",195,4937.38095238095
"177",196,5191.42857142857
"178",197,5533.57142857143
"179",198,5683.71428571429
"180",199,5955.38095238095
"181",200,6318.57142857143
"182",201,6214.85714285714
"183",202,6167.80952380952
"184",203,6524.47619047619
"185",204,6826.14285714286
"186",205,6846.95238095238
"187",206,6975.66666666667
"188",207,7325.80952380952
"189",208,7488.04761904762
"190",209,7584.71428571429
"191",210,7458.04761904762
"192",211,6931.52380952381
"193",212,6847.33333333333
"194",213,6665.90476190476
"195",214,6637.09523809524
"196",215,6637.09523809524
"197",216,6637.09523809524
"198",217,6707.80952380952
"199",218,6657.04761904762
"200",219,6630.80952380952
"201",220,6465.09523809524
"202",221,5746.7619047619
"203",222,5677.71428571429
"204",223,5673.42857142857
"205",224,6052.2380952381
"206",225,6061.28571428571
"207",226,6100.90476190476
"208",227,6407.80952380952
"209",228,6582.80952380952
"210",229,6400.09523809524
"211",230,6277.71428571429
"212",231,5974.61904761905
"213",232,6039.85714285714
"214",233,6138.61904761905
"215",234,5793.95238095238
"216",235,5185.61904761905
"217",236,5185.61904761905
"218",237,5185.61904761905
"219",238,5011.33333333333
"220",239,4762.33333333333
"221",240,4749.28571428571
"222",241,4999.7619047619
"223",242,5391.90476190476
"224",243,5391.90476190476
"225",244,5391.90476190476
"226",245,5236.42857142857
"227",246,5204.09523809524
"228",247,5067.57142857143
"229",248,4865.90476190476
"230",249,4221.85714285714
"231",250,4196.14285714286
"232",251,4287.33333333333
"233",252,4549.61904761905
"234",253,4529.61904761905
"235",254,4321.95238095238
"236",255,4376.2380952381
"237",256,4534.95238095238
"238",257,4642.28571428571
"239",258,4867.95238095238
"240",259,4410.66666666667
"241",260,4181.7619047619
"242",261,4132
"243",262,3547.95238095238
"244",263,3165.57142857143
"245",264,3537.19047619048
"246",265,3752.71428571429
"247",266,3212
"248",267,3017.04761904762
"249",268,3013.47619047619
"250",269,2987.04761904762
"251",270,3462.28571428571
"252",271,3469.42857142857
"253",272,3391.09523809524
"254",273,3847.14285714286
"255",274,4045.71428571429
"256",275,4249.85714285714
"257",276,4280.47619047619
"258",277,4291.42857142857
"259",278,4184.09523809524
"260",279,3958.42857142857
"261",280,4075.47619047619
"262",281,4352.66666666667
"263",282,4394.80952380952
"264",283,4441.95238095238
"265",284,4476.2380952381
"266",285,4435.66666666667
"267",286,4468.14285714286
"268",287,4414.09523809524
"269",288,4349
"270",289,4450.19047619048
"271",290,4178.04761904762
"272",291,3862.09523809524
"273",292,3881.38095238095
"274",293,3871.38095238095
"275",294,3611.19047619048
"276",295,3662.61904761905
"277",296,3575.85714285714
"278",297,3918.2380952381
"279",298,4170.2380952381
"280",299,4170.2380952381
"281",300,4170.2380952381
"282",301,4577.85714285714
"283",302,4423.38095238095
"284",303,4388.38095238095
"285",304,4756.47619047619
"286",305,4869.19047619048
"287",306,4538.14285714286
"288",307,4290.14285714286
"289",308,4433.2380952381
"290",309,4762.28571428571
"291",310,4642.71428571429
"292",311,4739.61904761905
"293",312,4890.19047619048
"294",313,4977.90476190476
"295",314,5076.2380952381
"296",315,4817.2380952381
"297",316,4391.52380952381
"298",317,4344.14285714286
"299",318,4162.71428571429
"300",319,4213.19047619048
"301",320,4247.85714285714
"302",321,4270.42857142857
"303",322,4225.42857142857
"304",323,4455.19047619048
"305",324,4327.80952380952
"306",325,4272.80952380952
"307",326,4688.19047619048
"308",327,4688.19047619048
"309",328,4688.19047619048
"310",329,5210.80952380952
"311",330,5324.14285714286
"312",331,5318.71428571429
"313",332,5487.85714285714
"314",333,5625.85714285714
"315",334,5602.42857142857
"316",335,5535.52380952381
"317",336,5745.47619047619
"318",337,6030.2380952381
"319",338,5963.57142857143
"320",339,5825
"321",340,5540.95238095238
"322",341,5506.28571428571
"323",342,5496.09523809524
"324",343,5394.66666666667
"325",344,5061.95238095238
"326",345,5061.2380952381
"327",346,4868.61904761905
"328",347,4190.04761904762
"329",348,4192.90476190476
"330",349,4192.90476190476
"331",350,3631.2380952381
"332",351,3369.09523809524
"333",352,3366.2380952381
"334",353,3131.38095238095
"335",354,2723.52380952381

+ 188
- 0
hypotheses_modeling/KerasRegressions.py View File

@ -0,0 +1,188 @@
import tensorflow as tf
import pandas as pd
import numpy as np
def time_series_sigmoid_classification(X, Y, k, n0, x_columns, y_columns):
inp = X[x_columns]
out = Y[y_columns]
col = "day"
x = []
y = []
input_shape = 0
output_shape = 0
for player in Y["playerID"].unique():
XPlayer = inp[inp["playerID"] == player]
YPlayer = out[out["playerID"] == player]
for day in YPlayer[col][n0 - 1:]:
prev = day - k
xprev = XPlayer[XPlayer[col] == prev].drop(columns=[col]).to_numpy()
if xprev.shape[0] != 1:
continue
else:
xprev = xprev[0, :]
yt = YPlayer[YPlayer[col] == day].drop(columns=[col]).to_numpy()[0, :]
if input_shape == 0:
input_shape = xprev.shape[0]
else:
if input_shape != xprev.shape[0]:
print("INCONSISTENT INPUT DIMENSION")
exit(2)
if input_shape == 0:
output_shape = yt.shape[0]
else:
if output_shape != yt.shape[0]:
print("INCONSISTENT OUTPUT DIMENSION")
exit(2)
x.append(xprev)
y.append(yt)
x = np.array(x)
y = np.array(y)
model = tf.keras.Sequential([
tf.keras.layers.Flatten(input_shape=input_shape),
tf.keras.layers.Dense(output_shape, activation=tf.nn.softmax)
])
model.compile(optimizer='adam', loss='sparse_categorical_crossentropy', metrics=['accuracy', 'categorical_accuracy'])
model.fit(x, y, epochs=100)
loss, accuracy = model.evaluate(x, y)
print(loss, accuracy)
return model.get_weights()
def time_series_dnn_classification(X, Y, k, n0, x_columns, y_columns):
    """Train a two-layer network predicting day-``t`` targets from day
    ``t - k`` features, built per player.

    Parameters
    ----------
    X, Y : pandas.DataFrame
        Feature and target frames; both must contain "playerID" and "day"
        columns in addition to ``x_columns`` / ``y_columns``.
    k : int
        Lag in days between the input observation and the target day.
    n0 : int
        1-based index of the first target day considered for each player.
    x_columns, y_columns : list of str
        Column subsets used as model inputs and outputs.

    Returns
    -------
    list of numpy.ndarray
        The trained model's weights (``model.get_weights()``).
    """
    inp = X[x_columns]
    out = Y[y_columns]
    col = "day"
    x = []
    y = []
    input_shape = 0
    output_shape = 0
    for player in Y["playerID"].unique():
        XPlayer = inp[inp["playerID"] == player]
        YPlayer = out[out["playerID"] == player]
        for day in YPlayer[col][n0 - 1:]:
            prev = day - k
            xprev = XPlayer[XPlayer[col] == prev].drop(columns=[col]).to_numpy()
            if xprev.shape[0] != 1:
                # No (or ambiguous) observation k days earlier -> skip this day.
                continue
            xprev = xprev[0, :]
            yt = YPlayer[YPlayer[col] == day].drop(columns=[col]).to_numpy()[0, :]
            if input_shape == 0:
                input_shape = xprev.shape[0]
            elif input_shape != xprev.shape[0]:
                print("INCONSISTENT INPUT DIMENSION")
                exit(2)
            # BUG FIX: the original tested ``input_shape == 0`` here, so
            # output_shape was never recorded and the guard below fired on
            # the second accepted sample.
            if output_shape == 0:
                output_shape = yt.shape[0]
            elif output_shape != yt.shape[0]:
                print("INCONSISTENT OUTPUT DIMENSION")
                exit(2)
            x.append(xprev)
            y.append(yt)
    x = np.array(x)
    y = np.array(y)
    model = tf.keras.Sequential([
        # BUG FIX: input_shape must be a shape tuple, not a bare int.
        tf.keras.layers.Flatten(input_shape=(input_shape,)),
        # NOTE(review): softmax on a hidden layer is unusual (relu is the
        # conventional choice) — kept as in the original design.
        tf.keras.layers.Dense(32, activation=tf.nn.softmax),
        tf.keras.layers.Dense(output_shape, activation=tf.nn.softmax)
    ])
    model.compile(optimizer='adam', loss='sparse_categorical_crossentropy',
                  metrics=['accuracy', 'categorical_accuracy'])
    model.fit(x, y, epochs=100)
    # BUG FIX: evaluate() returns [loss, *metrics]; with two metrics the
    # original 2-tuple unpack raised ValueError.
    loss, accuracy, categorical_accuracy = model.evaluate(x, y)
    print(loss, accuracy)
    return model.get_weights()
def time_series_linear_regression(X, Y, k, n0, x_columns, y_columns):
    """Fit a linear model predicting day-``t`` targets from day ``t - k``
    features, built per player.

    Parameters
    ----------
    X, Y : pandas.DataFrame
        Feature and target frames; both must contain "playerID" and "day"
        columns in addition to ``x_columns`` / ``y_columns``.
    k : int
        Lag in days between the input observation and the target day.
    n0 : int
        1-based index of the first target day considered for each player.
    x_columns, y_columns : list of str
        Column subsets used as model inputs and outputs.

    Returns
    -------
    list of numpy.ndarray
        The trained model's weights (``model.get_weights()``).
    """
    inp = X[x_columns]
    out = Y[y_columns]
    col = "day"
    x = []
    y = []
    input_shape = 0
    output_shape = 0
    for player in Y["playerID"].unique():
        XPlayer = inp[inp["playerID"] == player]
        YPlayer = out[out["playerID"] == player]
        for day in YPlayer[col][n0 - 1:]:
            prev = day - k
            xprev = XPlayer[XPlayer[col] == prev].drop(columns=[col]).to_numpy()
            if xprev.shape[0] != 1:
                # No (or ambiguous) observation k days earlier -> skip this day.
                continue
            xprev = xprev[0, :]
            yt = YPlayer[YPlayer[col] == day].drop(columns=[col]).to_numpy()[0, :]
            if input_shape == 0:
                input_shape = xprev.shape[0]
            elif input_shape != xprev.shape[0]:
                print("INCONSISTENT INPUT DIMENSION")
                exit(2)
            # BUG FIX: the original tested ``input_shape == 0`` here, so
            # output_shape was never recorded and the guard below fired on
            # the second accepted sample.
            if output_shape == 0:
                output_shape = yt.shape[0]
            elif output_shape != yt.shape[0]:
                print("INCONSISTENT OUTPUT DIMENSION")
                exit(2)
            x.append(xprev)
            y.append(yt)
    x = np.array(x)
    y = np.array(y)
    model = tf.keras.Sequential([
        # BUG FIX: input_shape must be a shape tuple, not a bare int.
        tf.keras.layers.Flatten(input_shape=(input_shape,)),
        tf.keras.layers.Dense(output_shape)
    ])
    # BUG FIX: sparse_categorical_crossentropy / accuracy make no sense for a
    # linear-output regression on continuous targets; use MSE loss with MAE.
    model.compile(optimizer='adam', loss='mse', metrics=['mae'])
    model.fit(x, y, epochs=100)
    loss, mae = model.evaluate(x, y)
    print(loss, mae)
    return model.get_weights()
def time_series_dnn_regressions(X, Y, k, n0, x_columns, y_columns):
    """Fit a two-layer regression network predicting day-``t`` targets from
    day ``t - k`` features, built per player.

    Parameters
    ----------
    X, Y : pandas.DataFrame
        Feature and target frames; both must contain "playerID" and "day"
        columns in addition to ``x_columns`` / ``y_columns``.
    k : int
        Lag in days between the input observation and the target day.
    n0 : int
        1-based index of the first target day considered for each player.
    x_columns, y_columns : list of str
        Column subsets used as model inputs and outputs.

    Returns
    -------
    list of numpy.ndarray
        The trained model's weights (``model.get_weights()``).
    """
    inp = X[x_columns]
    out = Y[y_columns]
    col = "day"
    x = []
    y = []
    input_shape = 0
    output_shape = 0
    for player in Y["playerID"].unique():
        XPlayer = inp[inp["playerID"] == player]
        YPlayer = out[out["playerID"] == player]
        for day in YPlayer[col][n0 - 1:]:
            prev = day - k
            xprev = XPlayer[XPlayer[col] == prev].drop(columns=[col]).to_numpy()
            if xprev.shape[0] != 1:
                # No (or ambiguous) observation k days earlier -> skip this day.
                continue
            xprev = xprev[0, :]
            yt = YPlayer[YPlayer[col] == day].drop(columns=[col]).to_numpy()[0, :]
            if input_shape == 0:
                input_shape = xprev.shape[0]
            elif input_shape != xprev.shape[0]:
                print("INCONSISTENT INPUT DIMENSION")
                exit(2)
            # BUG FIX: the original tested ``input_shape == 0`` here, so
            # output_shape was never recorded and the guard below fired on
            # the second accepted sample.
            if output_shape == 0:
                output_shape = yt.shape[0]
            elif output_shape != yt.shape[0]:
                print("INCONSISTENT OUTPUT DIMENSION")
                exit(2)
            x.append(xprev)
            y.append(yt)
    x = np.array(x)
    y = np.array(y)
    model = tf.keras.Sequential([
        # BUG FIX: input_shape must be a shape tuple, not a bare int.
        tf.keras.layers.Flatten(input_shape=(input_shape,)),
        # NOTE(review): softmax on a hidden layer is unusual (relu is the
        # conventional choice) — kept as in the original design.
        tf.keras.layers.Dense(32, activation=tf.nn.softmax),
        tf.keras.layers.Dense(output_shape)
    ])
    # BUG FIX: sparse_categorical_crossentropy / accuracy make no sense for a
    # linear-output regression on continuous targets; use MSE loss with MAE.
    model.compile(optimizer='adam', loss='mse', metrics=['mae'])
    model.fit(x, y, epochs=100)
    loss, mae = model.evaluate(x, y)
    print(loss, mae)
    return model.get_weights()

+ 355
- 0
hypotheses_modeling/fatigue_total_sum.csv View File

@ -0,0 +1,355 @@
TimeSinceAugFirst,fatigueSum
0,0.734457489003587
1,0.485175472200728
2,0.280474592899874
3,0.852899672028512
4,0.067718127997734
5,0.387261248267282
6,0.466815138124766
7,0.763731947890889
8,0.685512083480377
9,0.895108656027463
10,-0.061861871122328
11,0.296321907390904
12,0.623117762114496
13,0.753829250795893
14,0.217870344211225
15,0.53926849046086
16,0.734843942472552
17,0.308469121871186
18,0.167041135359123
19,0.746267880340515
20,0.622788652776368
21,0.214320156058007
22,0.050827857843934
23,0.294246790332209
24,0.186808624194322
25,0.552767197321023
26,0.78454170658556
27,0.779419550851109
28,0.798357373731036
29,0.323129040463109
30,0.145143808515553
31,0.68487764162657
32,0.539874793771553
33,0.36321009791783
34,0.489148204465833
35,0.413236542977807
36,0.395011893731417
37,0.156837050771386
38,-0.285558218459401
39,-0.349698539921412
40,-0.558765836143965
41,0.785941194779244
42,0.260825136166886
43,0.175173324395181
44,0.548662507988438
45,0.252321231106257
46,0.742763716938104
47,0.529872511137736
48,0.37042402851138
49,0.162733394153363
50,0.357107946620677
51,0.012983087373205
52,-0.038997965254495
53,0.241085179931302
54,0.27305103057282
55,0.501283454971351
56,0.352131554149897
57,0.510308182531921
58,0.408416961411701
59,-0.38225962517156
60,0.111784933841739
61,0.610578265741467
62,0.563836579606691
63,0.387038159139884
64,0.17691474265164
65,-0.158440974403251
66,-0.127808846886789
67,0.16767510337051
68,0.232412554117885
69,-0.034039586741236
70,0.369219132420847
71,0.055166081100634
72,-0.15914007575716
73,-0.143815705049808
74,0.309931292450968
75,0.481718700637843
76,0.542695985663472
77,-0.592609862606928
78,-0.087871760090222
79,0.323133612277408
80,0.145926738048549
81,0.266068498675575
82,0.112895006227449
83,0.11774653805839
84,0.289982558620813
85,0.156268777724842
86,0.336960194430492
87,0.211568516622687
88,0.405654888856629
89,-0.080651094767532
90,-0.989503212930113
91,-0.618589483268202
92,-0.106354633315225
93,-0.036704167015262
94,-0.199061758628498
95,-0.165391404949047
96,0.465559713151923
97,0.553712364147757
98,0.10718598664192
99,-0.176520319237606
100,-0.012471746839114
101,0.009238546030446
102,0.031210965670768
103,0.306981488185374
104,0.326512806301329
105,0.389789945787445
106,0.163338392510265
107,0.43041006311016
108,0.064529621280754
109,-0.056642839331862
110,0.360810616073057
111,0.674486235636005
112,0.203177618350165
113,0.016482187327924
114,0.645937951499796
115,-0.635636762766677
116,0.054267298726349
117,0.197678247905769
118,-0.037744729883124
119,0.510184386241254
120,0.456725618316876
121,0.36942720222611
122,-0.305694272607688
123,-1.09191169102099
124,-0.726910533417431
125,-0.43884728739089
126,0.029939288546349
127,-0.061371102729405
128,-0.245528132438078
129,0.009470765956943
130,-0.341784370603988
131,-0.049919365357729
132,0.097143172058972
133,-0.011342193521127
134,0.080421703207904
135,-0.129004417719117
136,-0.270859696160466
137,-0.708904646451948
138,-0.229009026264089
139,-0.181424438959781
140,-0.026564683920245
141,0.219065484118407
142,-0.530439085926495
143,-0.739993065212586
144,-0.078144247328314
145,0.281806254987051
146,0.106323312617401
147,0.461051503524687
148,-0.194960844877302
149,-1.0323471479209
150,-0.221708749633043
151,-0.146094771209489
152,-0.769845305708576
153,-0.006930021908459
154,-0.104647283728296
155,-0.012381788872799
156,-0.19121232166827
157,-0.671529527201817
158,-0.616394224498445
159,-0.073943182938408
160,0.057147640203543
161,-0.346330630873381
162,-0.08655944015928
163,-0.408202837238968
164,-1.28901633269902
165,-1.06200969217439
166,-0.239078707352407
167,-0.069592861351456
168,-0.47050097438462
169,-1.02350760409975
170,-0.382414735756562
171,-0.091983676112389
172,0.016194125135445
173,0.208557925296051
174,0.382631435594289
175,0.054618860565352
176,0.392539314443691
177,0.407308177079325
178,0.5361087576086
179,-0.559118537823889
180,-0.041279425806623
181,-0.811977779328337
182,-0.673110139192779
183,-0.647190032646896
184,-0.264055759125838
185,0.2137206708926
186,0.169532929678205
187,0.572823128029248
188,0.504138498108557
189,-0.025148663593221
190,0.243256165168842
191,-0.028600242158836
192,-0.470973085298726
193,-0.404980167324132
194,0.192687307444004
195,0.249123463461409
196,-0.094501395021819
197,0.330171642369123
198,0.303808916345168
199,-0.225895188174485
200,-0.337185144362268
201,0.184560773992318
202,0.105945063048639
203,-0.268263490373093
204,-0.630585558331497
205,-0.458206840787744
206,-0.498158052976791
207,-0.029702307826457
208,-0.067979765277485
209,-0.656925656525055
210,-0.457529274865099
211,-0.271352185811345
212,-1.11655632791675
213,-0.611166931446556
214,-0.610527376526797
215,0.034382530133085
216,-0.141318464945944
217,0.004511261469066
218,-0.164492395019851
219,-0.056862211997474
220,0.010261586762043
221,0.101082024613018
222,0.372300796539504
223,-0.145874961700409
224,0.060726106209981
225,-0.371059412216481
226,-0.370670092221636
227,0.083119527458891
228,-0.058338986929759
229,0.3362958277405
230,0.519265121699177
231,0.169683826619216
232,0.077486840441911
233,-0.474364612134558
234,-0.247702269140266
235,-0.659595488129965
236,0.262402593762269
237,-0.277905669965979
238,-0.073857678558781
239,0.053602846962544
240,-0.606205677585295
241,-0.624053318548306
242,-0.605867224210048
243,-0.252954297674564
244,-0.101338569851228
245,0.133299355850543
246,0.416469635541468
247,-0.48865392999333
248,0.145411566109825
250,-0.727338523179393
251,0.124104120321982
252,0.276748683294634
253,0.793381770905604
254,0.643063414197272
255,1.04989185847975
256,0.164337667404437
257,0.158175420900341
258,-0.376784412957032
259,-1.13446825563484
260,-0.682110266582935
261,0.03890579636218
262,0.250705347616431
263,0.63083548881295
264,-0.381185476545982
265,-2.09473205252048
266,-1.08423548600842
267,-1.00435109992827
268,-0.96922353115861
269,-0.845113477539493
270,-0.542326267460438
271,-0.154289023678556
272,-0.47818347594847
273,-0.306186227227698
274,0.130165587788887
275,-0.534607431666707
276,-0.024108562029165
277,0.441356603857835
278,0.835974252393425
279,0.504743612581526
280,0.608084130261808
281,0.354286698358717
282,0.149098455139156
283,0.362866649040982
284,0.585107099551978
285,-0.238849557858286
286,-1.29316205044401
287,-0.514971003845922
288,-0.130605271784348
289,0.054601023159174
290,0.712654053152515
291,0.345411091599797
292,0.590827647093801
293,0.226175565336716
294,0.066456432118622
295,-0.244109627769699
296,-0.668607818640876
297,-0.892753779097629
298,-0.864450623997139
299,-0.052629105168991
300,0.097879540271865
301,0.068861223343318
302,0.201303180551096
303,-0.343311264954187
304,-0.149597988654366
305,0.1591975667215
306,-0.128030046827757
307,-0.881761347390814
308,0.110911445153907
309,-0.059481544970226
310,0.248261046579106
311,0.217829171777561
312,-0.168769117743194
313,-0.734302536254684
314,-1.23366692589506
315,-0.425009478665031
316,-0.236956665438271
317,-0.135287941133465
318,-0.214676104364952
319,-0.246512442890127
320,0.020016985976053
321,-0.561102012471304
322,-0.258337553328078
323,-0.360201383670261
324,-0.676758179307032
325,-1.22326716932596
326,-0.552498077877283
327,-0.05938292421172
328,-0.230174146084735
329,-0.178125732975686
330,-0.782218150988738
331,-0.902771605320945
332,-0.83417758585907
333,-0.623742294718358
334,0.029100211618237
335,-0.276998463243979
336,-0.495616972912957
337,-0.16842928848014
338,-0.215205592767
339,-0.400929404850442
340,0.001421286489176
341,0.247646162510804
342,-0.019832667817069
343,0.41449646066539
344,0.305217583351337
345,0.260375893221019
346,0.643585606257478
347,0.587502564497385
348,0.933368991653645
349,0.562934043178896
350,0.582213233197938
351,0.550560856980579
352,0.598280595137412
353,0.739698209165581
354,0.066689402177346

+ 28
- 0
hypotheses_modeling/hypotheses.txt View File

@ -0,0 +1,28 @@
Team:
1.
x - normFatigue, TimeSince
y - normSoreness
r2 - 0.16830803694995466
2.
x - normFatigue, TimeSince
y - normDesire
r2 - 0.1722466278587138
3.
x - normFatigue, TimeSince
y - normIrritability
r2 - 0.09563459728506452
4.
x - normSleepHours, TimeSince
y - normSleepQuality
r2 - 0.09717095676693188
5.
Perry:
7 day moving average team workload - normalized team fatigue: 0.0006
21 day moving average team workload - normalized team fatigue: 0.0024
normalized team fatigue - game day performance: 0.0696
normalized team fatigue - paper smoothed workload fatigue: 0.0324

+ 350
- 0
hypotheses_modeling/slidingWorkAverageSevenDay.csv View File

@ -0,0 +1,350 @@
"","TimeSinceAugFirst","slidingWorkAverage"
"1",6,3340
"2",7,4662.14285714286
"3",8,5407.85714285714
"4",9,5460.71428571429
"5",10,5780.71428571429
"6",11,6050
"7",12,6225.71428571429
"8",13,6240.71428571429
"9",14,6057.14285714286
"10",15,6055.71428571429
"11",16,6022.71428571429
"12",17,6087
"13",18,6871.28571428571
"14",19,6693.42857142857
"15",20,6793.42857142857
"16",21,6967.71428571429
"17",22,7458.71428571429
"18",23,7511.71428571429
"19",24,7507.85714285714
"20",25,7335.71428571429
"21",26,7382.85714285714
"22",27,7265.71428571429
"23",28,7190.42857142857
"24",29,6881.57142857143
"25",30,6899.71428571429
"26",31,6846.42857142857
"27",32,6438.57142857143
"28",33,6445
"29",34,6445
"30",35,6207.28571428571
"31",36,5793
"32",37,5750.42857142857
"33",38,5989.71428571429
"34",39,5710.85714285714
"35",40,6025.85714285714
"36",41,6025.85714285714
"37",42,4970.28571428571
"38",43,4164.57142857143
"39",44,4362.85714285714
"40",45,3313.57142857143
"41",46,2486.14285714286
"42",47,2147.85714285714
"43",48,2152.14285714286
"44",49,3555
"45",50,4432.85714285714
"46",51,4489
"47",52,5900.42857142857
"48",53,7505.14285714286
"49",54,7474.85714285714
"50",55,7528.42857142857
"51",56,7414
"52",57,7653.28571428571
"53",58,7512.85714285714
"54",59,7521.42857142857
"55",60,7561.14285714286
"56",61,7610.42857142857
"57",62,7625.42857142857
"58",63,7663.14285714286
"59",64,7692.71428571429
"60",65,7839.85714285714
"61",66,7538.42857142857
"62",67,7605
"63",68,7617.85714285714
"64",69,7596.42857142857
"65",70,7843.85714285714
"66",71,7791.14285714286
"67",72,7870.85714285714
"68",73,7778.71428571429
"69",74,7231.85714285714
"70",75,7234
"71",76,7219
"72",77,5686.85714285714
"73",78,4371.42857142857
"74",79,4231.42857142857
"75",80,4410.71428571429
"76",81,2970.71428571429
"77",82,4127.14285714286
"78",83,6200.71428571429
"79",84,5997.85714285714
"80",85,6781.42857142857
"81",86,6890.71428571429
"82",87,5516.42857142857
"83",88,5910
"84",89,5540.28571428571
"85",90,4386.85714285714
"86",91,4268.28571428571
"87",92,3443.28571428571
"88",93,2926.14285714286
"89",94,3994.71428571429
"90",95,4876.14285714286
"91",96,4146.57142857143
"92",97,3190
"93",98,4790.71428571429
"94",99,6302.85714285714
"95",100,6819.28571428571
"96",101,7200.42857142857
"97",102,9141.14285714286
"98",103,9159
"99",104,9160.71428571429
"100",105,9474.28571428571
"101",106,9688.57142857143
"102",107,9578.57142857143
"103",108,9861.71428571429
"104",109,9296
"105",110,9156.71428571429
"106",111,9155
"107",112,8447.85714285714
"108",113,7577.71428571429
"109",114,7523.42857142857
"110",115,5789.85714285714
"111",116,3093.42857142857
"112",117,3466.28571428571
"113",118,4122
"114",119,3564.85714285714
"115",120,2901.42857142857
"116",121,3102.14285714286
"117",122,4238.42857142857
"118",123,5762.28571428571
"119",124,5389.42857142857
"120",125,4733.71428571429
"121",126,4419.42857142857
"122",127,4468
"123",128,4361.57142857143
"124",129,4438.14285714286
"125",130,4914.28571428571
"126",131,5047.14285714286
"127",132,5047.14285714286
"128",133,6378.57142857143
"129",134,7481.14285714286
"130",135,7591.85714285714
"131",136,8210.14285714286
"132",137,8463.57142857143
"133",138,8330.71428571429
"134",139,8330.71428571429
"135",140,8509.71428571429
"136",141,7275
"137",142,8413.28571428571
"138",143,7959.28571428571
"139",144,5693
"140",145,6011.57142857143
"141",146,6583
"142",147,4646.85714285714
"143",148,4782.71428571429
"144",149,3188
"145",150,3682.28571428571
"146",151,4128
"147",152,5956.57142857143
"148",153,5705.85714285714
"149",154,5663
"150",155,7444.14285714286
"151",156,7604.14285714286
"152",157,7715.14285714286
"153",158,9527.28571428571
"154",159,7585.85714285714
"155",160,7265.14285714286
"156",161,8889.42857142857
"157",162,7732.71428571429
"158",163,7988.42857142857
"159",164,7140.28571428571
"160",165,6757.42857142857
"161",166,6711
"162",167,6711
"163",168,6420.28571428571
"164",169,5748.57142857143
"165",170,5317.85714285714
"166",171,5349
"167",172,3464.42857142857
"168",173,4225.85714285714
"169",174,4225.85714285714
"170",175,3948
"171",176,4535.14285714286
"172",177,4590.42857142857
"173",178,4127.85714285714
"174",179,5383.57142857143
"175",180,4981.14285714286
"176",181,5135.14285714286
"177",182,4152.28571428571
"178",183,3264.42857142857
"179",184,3403.85714285714
"180",185,2967.71428571429
"181",186,1939.71428571429
"182",187,1560
"183",188,1483.14285714286
"184",189,2747.42857142857
"185",190,4782.71428571429
"186",191,5047.28571428571
"187",192,6633.42857142857
"188",193,8409.57142857143
"189",194,8271
"190",195,8193.85714285714
"191",196,8674.57142857143
"192",197,8553.57142857143
"193",198,8600
"194",199,8265
"195",200,8606.42857142857
"196",201,8813.57142857143
"197",202,8826.42857142857
"198",203,8151.42857142857
"199",204,7142.14285714286
"200",205,6893.57142857143
"201",206,6028.57142857143
"202",207,4961.42857142857
"203",208,5379.57142857143
"204",209,5733.85714285714
"205",210,5548.14285714286
"206",211,5098.85714285714
"207",212,5048.42857142857
"208",213,5704.14285714286
"209",214,6343.42857142857
"210",215,5718.14285714286
"211",216,5351
"212",217,6423.85714285714
"213",218,7730.14285714286
"214",219,7950.42857142857
"215",220,7662.57142857143
"216",221,5935.42857142857
"217",222,5935.42857142857
"218",223,5935.42857142857
"219",224,6184.71428571429
"220",225,5354.85714285714
"221",226,5303.85714285714
"222",227,5856.71428571429
"223",228,7469.57142857143
"224",229,7546.71428571429
"225",230,7546.71428571429
"226",231,5315.28571428571
"227",232,5034.57142857143
"228",233,5161.57142857143
"229",234,3862.57142857143
"230",235,2151.85714285714
"231",236,2074.71428571429
"232",237,2074.71428571429
"233",238,3534
"234",239,3897.57142857143
"235",240,3782.42857142857
"236",241,5280
"237",242,6554.28571428571
"238",243,6554.28571428571
"239",244,6554.28571428571
"240",245,6860
"241",246,6680.14285714286
"242",247,6258.71428571429
"243",248,5455.14285714286
"244",249,3959.42857142857
"245",250,3959.42857142857
"246",251,4233
"247",252,3254.85714285714
"248",253,3011.14285714286
"249",254,2924.71428571429
"250",255,2393.57142857143
"251",256,3091.14285714286
"252",257,3413.14285714286
"253",258,3816.57142857143
"254",259,3117.14285714286
"255",260,2854
"256",261,3212.57142857143
"257",262,2795.14285714286
"258",263,2446.14285714286
"259",264,3239
"260",265,3208.57142857143
"261",266,3264
"262",267,3186
"263",268,2903.14285714286
"264",269,3772.42857142857
"265",270,4849.57142857143
"266",271,3756.14285714286
"267",272,3148.14285714286
"268",273,5160.28571428571
"269",274,6097.14285714286
"270",275,6633.85714285714
"271",276,6273.85714285714
"272",277,5578.57142857143
"273",278,5557.14285714286
"274",279,5518.57142857143
"275",280,3802.14285714286
"276",281,3774.85714285714
"277",282,3647.42857142857
"278",283,3279.57142857143
"279",284,3000.57142857143
"280",285,3993.71428571429
"281",286,4737.71428571429
"282",287,4279.85714285714
"283",288,3175
"284",289,3069.28571428571
"285",290,2980.71428571429
"286",291,3007.14285714286
"287",292,2093.28571428571
"288",293,1357.85714285714
"289",294,2751.57142857143
"290",295,4038
"291",296,4010.85714285714
"292",297,5494.42857142857
"293",298,6503
"294",299,6423.71428571429
"295",300,6415.14285714286
"296",301,6702.14285714286
"297",302,6057.14285714286
"298",303,6085
"299",304,5794.28571428571
"300",305,5097.42857142857
"301",306,5097.42857142857
"302",307,5097.42857142857
"303",308,3846
"304",309,4191.71428571429
"305",310,3832.28571428571
"306",311,2930.14285714286
"307",312,3070.14285714286
"308",313,3412.57142857143
"309",314,3716.14285714286
"310",315,3903.57142857143
"311",316,2925.71428571429
"312",317,3115.14285714286
"313",318,3763.71428571429
"314",319,4472
"315",320,4233.57142857143
"316",321,3997.71428571429
"317",322,4926.71428571429
"318",323,6248.14285714286
"319",324,6036
"320",325,6124.57142857143
"321",326,6522.42857142857
"322",327,6418.42857142857
"323",328,6350.71428571429
"324",329,6802.14285714286
"325",330,6798.57142857143
"326",331,6805
"327",332,6575.28571428571
"328",333,5883.14285714286
"329",334,6155.28571428571
"330",335,6258.14285714286
"331",336,5507.57142857143
"332",337,5044
"333",338,5049.71428571429
"334",339,4775.14285714286
"335",340,4217.28571428571
"336",341,3945.14285714286
"337",342,3879.42857142857
"338",343,3874.28571428571
"339",344,3343.28571428571
"340",345,3329
"341",346,3255.42857142857
"342",347,2469.71428571429
"343",348,2478.28571428571
"344",349,2441.14285714286
"345",350,1511.85714285714
"346",351,1720
"347",352,1720
"348",353,1363.57142857143
"349",354,1483.57142857143

+ 61
- 0
hypotheses_modeling/team_regressions.py View File

@ -0,0 +1,61 @@
from sklearn import linear_model
import pandas as pd
from sklearn.metrics import mean_squared_error, r2_score
def k_days_into_future_regression(X, y, k, n0):
    """
    Linear regression predicting y on day t from the X row on day t - k.

    :param X: x timeseries dataframe (very clean, no unnamed columns),
              multidimensional rows; must contain a "TimeSinceAugFirst" column
    :param y: y timeseries dataframe (very clean, no unnamed columns),
              scalar rows; must contain a "TimeSinceAugFirst" column
    :param k: days predicting in advance
    :param n0: ignoring the first n0 days of y
    :return: intercept, slopes, r^2 score, and mean squared error divided by
             the residual degrees of freedom (n - 2)
    :raises ValueError: if fewer than 3 (x, y) day pairs could be matched, in
             which case the (n - 2) normalization would divide by zero or
             flip the sign of the MSE
    """
    col = "TimeSinceAugFirst"
    inp = []
    out = []
    # Pair each target day with the single feature row from k days earlier;
    # days whose earlier feature row is missing (or duplicated) are skipped.
    for day in y[col][n0 - 1:]:
        xprev = X[X[col] == day - k].drop(columns=[col]).to_numpy()
        if xprev.shape[0] != 1:
            continue
        yt = y[y[col] == day].drop(columns=[col]).to_numpy()[0, :]
        inp.append(xprev[0, :])
        out.append(yt)
    if len(out) < 3:
        raise ValueError(
            "need at least 3 matched (x, y) samples, got %d" % len(out))
    regr = linear_model.LinearRegression()
    regr.fit(inp, out)
    predictions = regr.predict(inp)
    # Normalize by residual degrees of freedom (n - 2), not n, matching the
    # unbiased error-variance estimate for a simple regression.
    mse = mean_squared_error(out, predictions) / (len(out) - 2)
    r2 = r2_score(out, predictions)
    return regr.intercept_, regr.coef_, r2, mse
def standard_lr(x, y):
    """
    Ordinary least-squares regression of y on x, with fit metrics.

    :param x: feature matrix (dataframe or array), one row per observation
    :param y: target values, one per observation
    :return: intercept, slopes, r^2 score, and mean squared error divided by
             the residual degrees of freedom (n - 2)
    """
    model = linear_model.LinearRegression().fit(x, y)
    fitted = model.predict(x)
    dof = len(y) - 2  # residual degrees of freedom
    return (
        model.intercept_,
        model.coef_,
        r2_score(y, fitted),
        mean_squared_error(y, fitted) / dof,
    )
def main():
    """Load the normalized wellness data and regress fatigue on soreness/time."""
    # Earlier experiment kept for reference (work moving average -> fatigue):
    # fatigueSums = pd.read_csv("fatigue_total_sum.csv")
    # workMovingAverage21 = pd.read_csv("21DaySlidingWorkAverage.csv", index_col=0)
    # print(k_days_into_future_regression(workMovingAverage21, fatigueSums, 0, 21))
    csv_path = "../data_preparation/cleaned/time_series_normalized_wellness_menstruation.csv"
    wellness = pd.read_csv(csv_path).fillna(0)
    features = wellness[['normSoreness', 'TimeSinceAugFirst']]
    target = wellness['normFatigue']
    # Sanity check: after fillna(0) every column should report zero nulls.
    print(wellness.isnull().sum())
    print(standard_lr(features, target))


if __name__ == "__main__":
    main()

+ 18
- 0
hypotheses_modeling/time_series_days_ranked.csv View File

@ -0,0 +1,18 @@
,TimeSinceAugFirst,DailyElo
0,121,0.0
1,122,-3.714599999999998
2,178,0.04346000000000028
3,179,2.1916710000000013
4,180,0.0
5,255,0.0
6,256,0.0
7,257,-2.520374784999996
8,263,-2.0880156214999985
9,264,-1.7032140593500005
10,284,-0.6130256153877235
11,285,2.620463284090865
12,311,-2.076954630427971
13,312,1.0960590427574828
14,313,1.8954531384817344
15,353,-0.2940921753664384
16,354,-1.8646829578297937

Loading…
Cancel
Save