Browse Source

secont commit

master
Sotirios Kontogiannis 2 months ago
parent
commit
aea69ef7be
47 changed files with 453289 additions and 0 deletions
  1. +201
    -0
      GRU/GRU_data/data/X_test.csv
  2. +801
    -0
      GRU/GRU_data/data/X_train.csv
  3. +201
    -0
      GRU/GRU_data/data/y_test.csv
  4. +801
    -0
      GRU/GRU_data/data/y_train.csv
  5. +6411
    -0
      GRU/GRU_data/data_128/X_test.csv
  6. +25639
    -0
      GRU/GRU_data/data_128/X_train.csv
  7. +6411
    -0
      GRU/GRU_data/data_128/y_test.csv
  8. +25639
    -0
      GRU/GRU_data/data_128/y_train.csv
  9. +6407
    -0
      GRU/GRU_data/data_256/X_test.csv
  10. +25623
    -0
      GRU/GRU_data/data_256/X_train.csv
  11. +6407
    -0
      GRU/GRU_data/data_256/y_test.csv
  12. +25623
    -0
      GRU/GRU_data/data_256/y_train.csv
  13. +6399
    -0
      GRU/GRU_data/data_512/X_test.csv
  14. +25591
    -0
      GRU/GRU_data/data_512/X_train.csv
  15. +6399
    -0
      GRU/GRU_data/data_512/y_test.csv
  16. +25591
    -0
      GRU/GRU_data/data_512/y_train.csv
  17. +6413
    -0
      GRU/GRU_data/data_64/X_test.csv
  18. +25647
    -0
      GRU/GRU_data/data_64/X_train.csv
  19. +6413
    -0
      GRU/GRU_data/data_64/y_test.csv
  20. +25647
    -0
      GRU/GRU_data/data_64/y_train.csv
  21. +80
    -0
      GRU/GRU_data/generate_data.py
  22. +179
    -0
      GRU/codes/WideGRU.py
  23. +142
    -0
      GRU/codes/WideGRU_128.py
  24. +139
    -0
      GRU/codes/WideGRU_128_cloud.py
  25. +142
    -0
      GRU/codes/WideGRU_256.py
  26. +139
    -0
      GRU/codes/WideGRU_256_cloud.py
  27. +142
    -0
      GRU/codes/WideGRU_512.py
  28. +139
    -0
      GRU/codes/WideGRU_512_cloud.py
  29. +142
    -0
      GRU/codes/WideGRU_64.py
  30. +139
    -0
      GRU/codes/WideGRU_64_cloud.py
  31. +32068
    -0
      data/all_data.csv
  32. +32068
    -0
      data/aqi_data.csv
  33. +32068
    -0
      data/complete_data.csv
  34. +32068
    -0
      data/input_data.csv
  35. +32068
    -0
      data/processed_aqi.csv
  36. +32068
    -0
      data/processed_dataset.csv
  37. BIN
      presentation/MARIA_PSAROPA_Presentation.pptx
  38. +161
    -0
      slideNN/codes/slideNN.py
  39. +222
    -0
      slideNN/codes/slideNN_1.py
  40. +213
    -0
      slideNN/codes/slideNN_2.py
  41. +210
    -0
      slideNN/codes/slideNN_3.py
  42. +214
    -0
      slideNN/codes/slideNN_4.py
  43. +19
    -0
      slideNN/slideNN_data/preprocessing/dataframes.py
  44. +30
    -0
      slideNN/slideNN_data/preprocessing/model_graph.py
  45. +48
    -0
      slideNN/slideNN_data/preprocessing/preprocess_data.py
  46. +68
    -0
      slideNN/slideNN_data/preprocessing/reshaping_aqi.py
  47. +49
    -0
      slideNN/slideNN_data/preprocessing/reshaping_models_input.py

+ 201
- 0
GRU/GRU_data/data/X_test.csv
File diff suppressed because it is too large
View File


+ 801
- 0
GRU/GRU_data/data/X_train.csv
File diff suppressed because it is too large
View File


+ 201
- 0
GRU/GRU_data/data/y_test.csv View File

@ -0,0 +1,201 @@
target_0,target_1,target_2,target_3,target_4
0.7869667479358955,1.3511670718429947,1.043632041970743,-0.04399033758992999,-0.7579721947386325
1.919662466003874,-1.3171546391860138,-0.12928438520766464,1.2202715199678549,1.3042190280277002
-1.020401098316982,0.14657832273319646,1.4809330711825395,-0.2634082967366841,-1.2026837917037108
1.0967130014391873,1.1090232080562852,-0.2798539070783244,-0.9490109490930388,-0.6573359692980909
3.7979014414517236,1.2334983097747947,-0.2330923033021843,-1.028030425403378,1.1315216769490468
-2.6032454034530157,-2.9202659341569257,0.004344309515055278,-0.8930306649168636,0.8015780751915285
-0.12500404748721552,0.517019057067699,1.0171498647778685,-0.41240662601734734,1.5387906472565007
1.2851099293695831,-0.6685103885939024,-0.607143097275508,0.5068882043481183,0.8373975993262242
1.4307262539826864,0.6379639019858301,-1.0117186784815966,2.0397183345965115,-0.3042447944161506
0.17298785578654421,0.021837761434677524,-0.33082982947824713,0.021475062230692517,0.6690212578091553
1.3449647137737797,-0.8139816222674355,0.2677812253918281,0.2657459475447573,0.22523337022440082
-0.2224478210525979,-0.6383000267116637,-1.2226207282028148,0.5136972929849366,1.417650731526237
0.6625067802881787,-0.1957524842417676,-0.0865305988620568,-0.9272110549239151,-0.22353940113026968
0.9495038666647346,-2.2716461819622027,0.19058472220655123,-0.3111362237014714,0.7330765732530259
0.93687802657188,1.8132402540946781,-0.47633000075326254,-0.8213911081381552,-0.27420373079409605
-0.36766115715140685,1.1233989564187352,0.20298539239469113,0.42049830447361564,-2.0545584220051034
1.9062461269634792,-0.06565634166377454,1.2717731675309862,0.6372637238108958,-1.4193120295209984
-0.2380873297536308,0.5972757358440824,1.213058537980468,1.062125165025216,0.5358728571930832
-0.4752550790238565,0.1822050404052423,-0.005450160043708976,0.2637158030597918,-2.0249733202044005
-0.3582724246467219,-3.293738433936468,0.857415006014418,0.19930825315017614,-0.2678241822082587
0.2948456570928055,1.2466979859221128,0.5031155347417826,-1.4074065335931694,0.40828570997389135
0.229265845435999,-1.3693494362401752,-1.8604431991759744,0.29456089693940357,1.1778841419605481
0.43123890228802364,1.7877335535676442,-0.5586236693582689,0.24470507234496316,-1.3452647134133935
-1.2448586864429378,0.08191150149150607,0.8136864997325043,0.19300969157903167,-0.38748238482819714
1.3893656826989984,-0.31836432523079283,-1.0281827361753058,-0.22417007936268796,0.8682090192158279
0.30022122697499926,-0.9869041159300813,1.4868781254322048,-0.08339007133658467,0.960370233774013
0.23126927395618335,-0.2596473998419014,0.6997985576979319,-0.6976972922449242,0.0073393971578415255
0.6765433625943129,1.3240614751178876,-0.14260830402137337,1.6596064112853883,0.1705507649979383
-0.27864410740410406,0.2880096796348037,-0.9981495854451391,-1.1428407574902233,-0.16983255408990136
-0.00341219179075505,2.0117746558532397,-1.615497460733127,0.19860000662532712,0.45910999030470884
-0.661737885378015,-0.08472558319553997,-0.35810937330685155,0.46032159446188553,0.258025432122608
-0.708755299900034,0.6617387151871869,0.07346229785841414,0.28125060348864583,0.40990366000352313
1.1139228675823158,0.5518650037072953,-0.8744992635671003,0.4970745326782553,0.14339302197002263
0.06602665180007883,0.6659559821120972,2.1164587698942685,1.0694972101542264,1.5243219634470597
-0.050767603348747185,-1.5982058002512178,0.1521043342117068,-0.22502314599974582,-0.28051808508437887
0.881738618389432,0.31014732686473334,0.15748063287626782,-1.5806671414507099,-0.6605391406229416
-0.4126241256472927,1.6107710491812148,0.919475603533309,-0.4973561431650348,0.8741105946735663
0.6097293621327365,1.3888391772012272,1.295602646399941,1.631713432284135,0.7820627849902514
-1.1730067867393028,0.012263625695506592,0.04392014818902153,1.5944069383339268,-0.49564547924681024
-0.029233783020270257,0.6381212023605817,-1.278932987791233,-0.7848361988760341,0.047077666430072405
0.13420015829477724,-0.3991105182217955,0.8708812150599688,-0.23587105932261693,0.7296823925237753
-0.4514975677377576,1.3040014154564084,1.9626529640101982,-1.3422262399973306,1.0328718257043275
-0.14214738303544422,-0.7235901149528498,0.01278482391723433,-0.4572504703354334,1.4108155159097044
0.6810466121855319,0.11809120187941699,-0.6999319911897095,-1.5510551013030216,2.259981220251423
-0.6309243533681534,0.8669976281008246,-0.5740872209066762,-1.2171579979965041,-1.2611315444477134
-1.2104685723853839,-0.5391517680449869,0.5366421953975408,-0.27453037693224874,-0.7705115912603194
-0.2528282210137308,0.4879555828177167,1.8246065597505838,-1.2600700315162956,1.1818716302251673
-0.8357966280395658,-0.7972253015891226,0.1405823407838141,-0.058521568045422516,0.3498092322933715
-0.4022710845165299,-0.7736378863543053,0.6497335105033435,0.37770652772108815,0.18626329549719262
0.2903094982156556,-0.2092815782767661,0.5399402549617115,-1.3109128420208278,-0.19696856926612258
0.16099243573149774,2.1910851271254415,0.6555821581439122,0.7221912661586765,0.8556853177481208
0.4927186513400594,-0.41465334513314067,0.8537403948736569,0.2535623738499396,-0.8091224434771153
0.18412558444752267,-0.7413699606238083,-1.0807277683136731,-0.3952743915989275,0.2137161149817559
-0.2721575718156215,0.48122269885010743,0.17976194936834466,1.0653665661135467,0.5145651055978183
-0.10812045358448066,-0.6152636301480167,1.1088490421556547,-0.3630232365761531,-1.6453418702208862
0.30989381773907504,-0.9772194742076584,0.8330392983882497,2.401063413860873,0.17485686435724723
0.002251344683906626,-0.24903733670744585,-1.4388662170329067,0.7835042208163933,0.765187625163552
-0.6092908364083998,0.4596199577657067,1.3480213699932388,-0.71927917673321,0.12806670881468793
1.2375277628357984,0.5105906023174962,0.17035863621981714,1.1453054225410104,-0.28159333977425927
-0.7480468118837948,-0.3377924398542954,-0.2066800653454986,1.4275799944732164,1.1720114096478074
1.3684170935414008,-1.2242532274713906,-0.9544012323655343,-0.08074802881770485,-0.016585895781366333
-1.3218970305130808,0.39611564244980846,-1.1581109987102338,-1.971406270447768,-0.5576955367162132
0.031697075473039765,0.1942175861238791,-0.07890597645156708,1.1820447793077171,-1.0426776996605476
0.7444556729241019,-0.01812509307418786,-0.7366798715733808,0.13939815208458495,1.1213200544813295
0.4557906452875493,1.1461416625282865,0.5474152241449073,0.20667814687347125,-0.14742898728924914
1.5991799716609751,-1.0853881441561437,0.9097457640016047,-0.01563679397736139,-1.6115582713590428
0.123925322084848,0.5313476856556083,0.5054628335099386,-1.6236672212183383,0.051694154848553345
-0.7559030788771549,-0.6417316451569731,-0.7072689954372778,0.06372456403722479,0.9810897238327085
-1.1766018687056625,-0.44084487739450806,1.1452961255631693,0.37101860252940266,-1.2007382719787743
0.4616881347385537,-0.8634900726705821,0.6740005863075677,0.5349223704727134,-1.2003549445925816
1.1766130956797425,0.5492855805944296,-1.7601493185561832,0.9445730438443267,1.071407405548414
1.1923583262825443,1.5541859898614667,2.081564286062659,1.4122481628610202,1.5953508931045932
0.3988126242705369,-0.805577080065805,0.775592671565412,0.14505085183591823,0.21021575249457106
-0.6067445324039747,0.40056935405631194,-0.008989489972519445,-1.2071751540826352,-1.3413686091708168
-0.6230008337064934,0.13795963571779118,0.3408752208560662,0.09110210288815794,1.0752159921353206
1.4273180495848865,-0.6121075058578861,0.587029751344402,-0.26727417033223577,-0.7457784225347268
-1.5892680338622271,-0.8643285477664401,-0.48950193641032813,0.6054404700365573,0.34970369770575815
1.16594739213487,2.0550072756939772,-1.3284835375233177,0.24511483354945532,0.1507515872890063
-1.2339296597855491,-0.025308865808574922,0.553190168718648,-0.7683133666589859,-0.2568992498065698
-0.2580800379238087,3.022521024429826,-1.4826327324160313,0.44927302574642786,-0.9482175827115887
-0.8642496296668424,1.1888586501043226,0.687381635462356,1.4067928842882567,1.1621558415607767
-0.4281605050770357,0.1573370780323909,0.6827882879565368,-1.152959488998679,0.6800793250487186
-1.0528085603345356,0.8251721012639517,1.2429071765334054,0.2936016057802807,-2.089557025204758
-0.029070210510333608,-0.03367451849233153,2.4582405589385674,-1.0987082652129476,-0.19801017510713695
1.3875498224648752,-0.9053378261127458,2.478799769820193,0.9348731367347153,-0.3791092412722379
-2.257548232325791,-0.09229395122509348,-2.7183048243438486,-1.0188925326142,-0.9906547366206384
2.4046635687974556,0.3866248196125132,-2.4424568596877037,1.7003304989616603,-1.057856961278591
1.1752612736894852,0.12586932925274968,-2.4496128467184333,0.8158558059474583,1.3017415811345028
0.3599428773221332,-0.7695489959568679,-0.260694365024315,0.6792127067866299,-2.8826261751358384
-0.6791056167148101,-1.0852691719632772,-0.7862106906203415,-1.0003894327033072,-0.3725759351301373
0.9595475129606471,0.5946413200917336,-3.1744048893274845,0.2027665286561337,-1.4530906111819464
0.7401552470650681,0.3224710162212167,-0.5060100690867277,0.32532823743669903,0.8551128638498741
-1.835755507985024,0.25328082064825336,0.8611593412703267,0.357081522818157,1.2149365329540207
1.3310706463892301,1.3712915400119177,0.633735026001096,1.302146448597435,0.7514938681397276
-0.24396623284722768,0.3078995765513938,0.9064927967727071,-0.21402702826486295,0.0915717863542269
-0.4113176554537358,0.10184509393275928,0.05017109968484932,0.22009955468839074,0.42692609184817093
-0.6075707181841686,-1.276078859587253,-0.9155214014804718,0.2934664855836367,-1.4723549201387183
1.2001644718252373,-0.7559830506573381,-1.0242260600422066,0.5097527751121783,-0.5507659341364693
0.5294156539468191,0.9127196195565819,-0.7228405381562987,1.8546529420011852,0.14177594933993112
0.06991816550352162,-1.6270711053166456,-0.869822229737175,0.06171960697307137,-0.09702817810294029
-0.34723630894055973,0.1802192595245964,0.6162685139164057,1.3597691409018624,0.3350838565586239
-0.3968984531211823,-0.031721451798286664,0.0896711824565615,-0.9168961395944873,-0.2679321814264308
-1.292057173360892,-0.7337794282170333,-1.16552468311749,0.4411639952481523,-0.19335067780675644
2.2568388396240167,-0.7020131103106458,0.2900708021453522,-0.029822286526601866,0.07483617743311878
-1.0120648911632784,0.026016891035505325,-0.9613101914248723,0.5900397711014137,-0.5388443841195671
-0.18451852612788033,-0.31807615351665947,0.009623026927211323,-2.7645850095946503,-0.5751115721309877
0.5339869770532077,-0.7797337735562271,0.5795176155242349,-0.3025275303607118,-0.8351178617693564
0.8104015330532224,-0.2867203546176194,0.5056624965273664,3.299900589213783,0.48153210868202817
-1.0604879155916715,1.6311013698842791,-1.3525909666408815,0.2695426555388759,-2.097275367491091
-0.12361308875886338,-0.816095198826351,-0.12186887738288957,1.5326598030088594,0.3703664252316911
-1.6191161843738442,1.2272743683828564,-1.385817379370012,-0.007516700895942018,0.07471107239928917
-1.66618236844757,-0.31500080006701026,-0.5734247469363438,-1.0666621409819046,-0.15239122490922294
-0.26607148160596666,-0.26403251563293845,-0.050960551877394976,1.7095210296409564,0.09066972238691186
1.3965886154516338,-0.9890748974480608,-0.43834546833334576,0.1778756104755628,-0.36220247002208533
-2.1446251589139442,0.3638822765413915,1.0525124497383391,0.24653682975824306,-0.7209640819048728
-1.255659470914992,0.16400504308998715,-0.8639421142037135,0.2885566155532434,-0.7606167201807262
0.8526750966643891,-1.210309514810404,-0.36118905362510145,0.798792538069131,-0.7301969206108068
1.601393185419995,2.217450418859361,1.4096651959308812,1.4565406603473379,0.737083106937554
-0.09674003616222321,1.6131671910794187,1.6014410810751036,1.3136359540747748,0.9287663390056625
0.8805518426009127,-3.1920179465428773,-2.147433437124625,1.554024818849726,-0.6662013711801621
0.13879354588489157,-1.016388259887419,-0.7841008677474284,0.19327921874235765,0.537648486537709
-0.9217304825141562,0.05440300502507475,0.298308483145663,0.5413850474812465,-0.15499213598080008
1.4837415443014632,0.4773285886814539,0.04102601219458419,1.7999954006998193,-1.5796330897289934
0.1359176847508649,0.17051871621074405,-0.5901102032183716,-0.997034447352591,-0.9616373347092616
1.5900810348046621,0.3497159243898496,-1.736152675802476,-0.4971837326750286,0.08159739239511474
0.2063228552631873,0.40524567245896864,0.5529449911524259,0.15784525019660842,2.7097288692454384
-0.9341662971025343,0.9151318926186438,0.4454463248032926,-0.5571980255115456,1.0888253004663644
-1.5925362559425085,0.12419673797572071,2.788667708791732,1.121717279028221,-1.213019660805421
0.10956808155315967,-0.20264569125459178,-0.7191732053165725,0.03594911810522181,0.9407294400880324
-1.3736851277155266,0.9808683258195098,-1.0634952191210745,0.7206178546983936,-0.8127840334764315
-0.39557524410458705,-0.38539594984579956,-0.38370993798298814,0.25308245880733476,0.012992601015910123
0.22392918498013073,-1.0404508172277134,-0.41602466004099714,-1.0317605030802266,0.78270422669562
-0.026294437853934097,1.1902184268252354,-0.2212868432887694,0.5752996839210449,1.1705469845801915
-0.633441853082786,-1.7192858470674923,-0.9856384538004425,-0.5543143780167916,0.7321054829361096
0.8105171046864541,1.2473746231841827,0.3494898142851101,0.23383461773483144,0.5252753437088573
-0.055111441753046275,0.8250186755184692,2.443249847142972,-0.4735709575775899,0.2213514371413006
-1.1109490002361468,-0.2631042281563953,-0.8218641708928749,2.8495162698349046,0.6266224971108918
-1.6758866157828451,-1.3657016541544715,-1.873771814552137,0.3325671812052499,-0.2855346269856479
0.3332700300879579,0.7721747509794974,1.4184548991970518,-0.32496357749644234,0.16739099165152801
-0.07645802980982397,0.21184334191281567,0.44160811927922505,-0.7344702078213993,-0.27836255850769487
1.5115519365791643,-0.24671562540280537,1.1147820829352308,1.146191353729947,-0.2591315139498797
-0.06884319116307282,0.7865815756867689,-2.3810459006577895,0.07908511465117632,1.0846588318296801
-1.7830729505129748,-2.312275040543988,0.18496788401645844,-0.6950432198871691,-1.328794448192229
0.5586066793827192,1.7644795350454396,-0.26959122610189284,-0.6661286894390125,-0.823849853624965
-2.0986626339760828,1.0671080873770769,0.22888876859626822,0.21233475605980892,-1.1805361549671225
-0.2507324454804735,-0.1984055530252641,-0.45248565718840356,-1.2255165517315716,-0.666357589686795
2.104627620131313,0.09696210504891513,0.8115131000928348,0.6394636499811961,-1.797920871438382
0.08377960420523292,-0.5423222085571516,-1.6306708135534582,-0.5407672586052655,-0.08750280934730695
0.08023500262981478,1.9675086455031958,1.2761934871210479,-0.8200390892293983,-1.7790377785398495
-0.14898141899170694,0.2808676930353202,0.11192191503949793,0.40081243116429366,-0.03312968888622474
-1.8394935372118353,0.12228492284735383,0.9433828019101055,-0.6553240776454404,0.8549139891998829
0.5189207050061377,0.7115710292913138,0.41119483193099665,0.7104413824991445,-0.7558752644123189
0.03725700793633745,0.8467501490132376,-0.3656885077501991,-1.4677854138955229,0.3705849471678632
-1.965956566958489,-0.9102464902013172,-0.48042718791606265,-0.8226583239903078,-0.21536754839097683
-1.4340837328180744,0.37497996659409316,-0.45083713994828684,2.670233608712337,-1.6601090598534334
-0.07274801920882654,0.4765442825013412,0.835589790886942,-0.5201172493212464,0.8271639441049194
1.2823721148404308,-1.799458024538456,-1.5666723998280212,-0.11836906511054054,0.8228277186032485
0.012391950275439855,-0.2988921864830138,0.10199184016624409,-0.01079910990833221,0.5571931531217109
1.596145292667364,0.19383655259926,-1.2104409117559602,-0.80451344471489,0.6929899831172248
-1.1103064998579897,-0.11864645250209302,-0.8176039036668791,1.8818912893185644,1.6431342803342177
0.06757622002883575,-0.21690400003298324,-0.7801379285771742,0.5735809350332421,-0.6057886450945829
0.07280583898413708,-0.0947988408786809,-1.7614264730852305,-0.8867893493640228,0.596409752206303
0.3112803899831073,-0.5102612320298715,-0.13208501433683126,-0.6733331187145069,1.2071627727413607
1.56610113242054,0.17191609737198021,-0.8941962549549889,0.19855357624851594,0.8320434564326906
-0.07398929152264128,0.7092673374518839,-0.18742305988728883,0.9587179436354515,-0.588312048281667
0.034584677073163324,-1.356274051372943,1.5584742787696466,3.062613647922078,0.04590802910533239
0.4033991366532936,-0.20249365135882372,-0.18810839293364381,0.49897594908896536,-0.8157197816248392
0.6094796095231109,-0.18621605741388922,-0.17070006488418918,0.18577272199647624,-0.5449700991101655
-0.962087130277513,-1.3676711107969914,-2.6040572214933246,0.3590373603854688,0.6533810036314632
-2.4066100225557254,-0.41780080665140895,-0.8578366847591539,1.3061280471410788,0.2962367732322589
0.7878366652058655,2.497892240737606,1.0514089942405687,-0.2641610153027865,-0.4589865387921452
-0.23879057755414232,2.1853922562689863,-1.8806172873924298,0.06778897536706166,0.3676556052294023
1.4381815155750275,-1.891083880725767,-0.5238592337039959,-0.8551196414576946,-0.558620075685301
0.5037827302084152,-0.33756914192181375,-1.339975119013013,1.1169414479130357,-0.08466661164228825
-0.42429351057580966,0.3592826422515909,1.575552473951134,-1.3898539822246825,-0.09596748168201982
1.0740508408264495,-0.9285663562412301,-0.6189604926138859,1.4096036119007067,0.8572771985629952
-0.6242740602002067,0.10917857683323405,1.5480374317965273,1.6442738170019482,0.1114450862364403
-1.4596695850346502,-0.6452404865366088,0.4211800587716606,-0.9511205206296202,-0.011612106055365409
1.2447950775894108,0.48020999736946746,1.9222380838029969,3.1237169010616075,0.44700872528988345
-0.016228067610124516,-0.8738098647280753,1.6923524595808979,-0.17239600869514649,-2.1040689741305907
0.7734136401260007,0.158951010547572,0.048780829502228444,0.6232792725917818,-0.8888758131476262
-0.048274735677002995,-0.7206687532592967,1.1108490542564367,-2.76622244468887,-1.530279389233681
-1.279945613732467,0.0934929088962223,-0.20762325636289491,1.3946445793239293,-0.779896285194772
1.3953491358762933,-2.091550638120859,-0.7058620183677939,0.9031470895756879,2.0219234390377077
-1.1139840725792802,-0.9459381074424672,0.17465761338017616,0.906336984023726,-1.3029596746114287
-1.3054036865916487,0.04314388213875839,0.18680882992189732,-1.3610710135439907,-0.6317721349854794
0.3556719521609641,1.2331497077040803,-0.8223137206060206,0.16766704337545899,0.0684879340665697
-0.36640184738393183,-0.10439711107512407,-1.0212654570929136,-0.5514754536252315,-0.08077261234839812
0.47846921797102826,1.2058110744878752,1.9021299862807326,0.5215247258106228,0.09866549385684066
-1.254437385768536,-1.872037184928484,0.18278819940058838,-0.7329740038736091,1.2016378805432124
0.8716114796906514,-0.507425055894722,0.12198791846066288,-0.8987260196720821,-0.7662020584375783
1.1592561681505333,-0.7722473208058968,-0.7466132344855563,-0.27633815225233116,1.496064631126644
-1.9290248765478717,-0.32566612333400613,0.03049514331245505,-0.08641918596426182,-0.07333752389632889
-0.18700707213482864,0.27114296831762524,-1.2944764845682994,-0.9657975134375585,2.19584459401727
-0.6255242810637359,-3.2002666994992572,-0.2905393708092166,-0.043241916053216596,-1.377059751456991
-1.2318760006871383,0.9252713524116873,-0.6919887348563057,-0.7949537752126224,0.13047736788863096
-0.8524883487567315,0.8142897124027236,-0.11224803947400873,-0.03708471939277589,-1.6282669167389625
-1.277662202907984,1.3072034879979513,-0.8925554516634084,2.721458352629073,1.1333875002530582
0.23330779092684642,1.043914150357934,-0.36933208784271077,-1.584063951718069,-0.724624317856265
0.41621847723462135,0.17083991916715371,-0.12843021792126363,-1.2187638896063837,0.03808992581439088

+ 801
- 0
GRU/GRU_data/data/y_train.csv View File

@ -0,0 +1,801 @@
target_0,target_1,target_2,target_3,target_4
0.5052974558224538,-2.557321324015965,-0.4747177070565554,0.20325900172860473,1.6694962056330478
0.7099438290019638,-0.42199763096378823,-0.6217426680249971,0.22501373788405274,-1.1355256282346897
1.2011375448339738,0.06039987996061348,-1.2585216713070189,-0.9559066491755891,-0.10716045938834302
-2.1902772740231935,-0.814691363542807,-1.1784866453926797,0.6513768414048463,3.352752813583809
-0.49725792160027016,-0.6656791427129928,-0.99132493589445,-0.7954529333266572,0.8274700239295174
0.4853139590173264,0.2919985872466285,0.639873494077039,1.4060546307915516,-1.285513115884796
0.47600050580065734,0.9815061056499813,-1.1485485287777795,-1.9808802345868817,-0.6167929946579808
-1.181582922989699,0.4468990686538953,0.9431792268657516,-0.2747333247317653,-1.204109809854467
1.6763864188369657,-0.21316361901927725,1.4576714630543721,-0.35634644318998965,-0.2985876980755464
-0.7590825376402701,-0.8144830285843696,-1.4509115958231655,-0.06928037613449738,0.4028423088806952
-0.9331747923341781,0.09004717680983668,0.9323861915604909,-0.23045356487463298,-0.37930794265738743
0.02745879312967276,-1.0373659464722897,-0.6397751712436349,0.4896348297043438,-0.9883298993702896
-0.30049800492593837,-0.42538370564471467,-0.028243712950360815,0.1357362651327039,1.3445621442772384
-0.9377152590657913,-1.431949061956249,0.19315696029565868,0.5975589204700434,0.038689451541995004
1.2316457398857343,0.5323003707863596,0.56190925835473,-1.3417444164063907,-0.6754356364860585
1.2361042683578374,0.6295833650735126,-0.18325226367504416,-0.028013532726756676,0.8419180188417453
-1.449516663475485,-1.1012649038545546,0.3395792840520382,-1.4904080013229364,-0.5365469659714104
0.1784328264489283,1.246597406884101,1.9719703301425953,0.1840648365061548,1.0131945950571812
-1.9001622567921004,2.030197609825587,-0.16631897763937667,-2.2177980463592872,-0.2681215523135785
-0.4743170497590586,1.077792027748821,-0.07685902854084463,0.854481868671533,0.9092937162965419
-0.37411279129745445,-0.7690564757534287,-0.011628263097073292,-1.1769556402286354,0.5771526214931195
0.7572923897330037,-0.6586914887979903,-0.9527381523813689,-1.9331779585826958,-0.3165517917041675
1.7141988944802884,1.6724181690401114,-0.115282252197721,0.2753121529570055,-0.5714713285415993
-1.1360355745962765,0.3778906605845351,-0.8488444911290451,-1.1538786515191082,1.6573338107455766
-0.9863278703629672,0.5462540495260428,0.946339659980072,-0.14994884770177885,0.44715504905246034
1.1289992114130591,1.0047234249325943,-0.36854351979340555,-1.4810979582660393,1.4192255993832534
-0.843697307325157,-0.5661145698583907,-1.4358566731872011,-0.34961398907972013,-0.20183331134178747
-0.18655547702698827,-0.9980514980681365,-0.6261472523152622,-0.5529820729991416,-1.6221200145064891
1.1531789058186974,0.8676007870339076,0.6418132081675185,0.4260775487734013,0.9668528338468689
0.25608273176369545,1.7041450877998567,-0.8891627626512153,0.4126201854300654,0.8284584568225755
-1.1311915129384507,-0.2173652908917217,-0.2660173872093807,1.9137086745546892,-0.9630830623288876
-1.2452244962511845,-1.8511451034890498,1.3968170766556396,0.3667803102248546,-1.002138721117638
0.33185062891738976,-0.3514072621777066,0.0983498457586729,-0.1840265752985832,1.1767234543576492
-0.5842046852596541,0.5289058297287648,0.32701178783027,-0.49791260414705407,1.4468718167333836
-0.010858328124901223,2.576057643386676,1.461801647222104,-0.9394008880493085,0.06132741061158838
-0.9544457701324698,0.4415879416727764,0.12057601890139011,0.005241825094671722,1.0126027955587518
-0.026899022585331996,0.1879715858204723,-1.4280763392745695,1.3305376638607538,-1.1935409652324072
-0.25508208318841347,0.9216497959242719,0.8694794166932991,0.22097996099887923,1.7493904963253437
0.2225913168872466,0.31362679063372595,1.1301135187861704,-0.6450351474787062,1.0633067300315033
0.034863228755490444,-0.3381172584923287,1.1133161633815332,-0.050967456254838533,0.06541649907145589
1.7879333852599624,-0.6468245219309768,1.2498324495722748,-0.014182649227245917,0.4865652838574298
-1.3225720663128775,0.3972025378619645,-0.8356139057890758,-2.666525457675429,1.0208525909596329
-0.28597406282998045,-0.31279698398381645,0.397206050055984,-0.37640554995824654,-0.6143732508217659
-2.159211147798005,-1.6623427064768972,0.02710437693226807,-0.16407847340539905,0.3243980397330015
-0.4771123277659553,-0.18963868943171747,1.0003230987283898,-0.6174397984118477,1.4357173712676639
0.043355277834749684,-0.8595818825394477,-1.0081424955854574,-1.4577029046763914,-0.45269798817657914
0.5812266646711495,-1.6499878000674693,-1.475270422900634,-0.9180733061194862,0.7807039959393917
-0.3700567853903004,1.4127875511170627,0.2957788300583716,-1.659872351965206,-0.17099247599470047
0.6664350583206265,0.6331465505514301,0.824915763862727,-1.7929719687841945,0.2611941589501074
-0.3928781443850514,1.07227005095488,0.3212592727274855,1.6818410876438992,0.8152860187999397
-1.0689967242902603,-0.47616073374687157,0.310579103148321,-0.06285164820880695,-0.2043927514873893
-0.044899611208937276,0.5470062061790324,-0.45176730066609955,0.30679423834194275,-1.1340205267001682
-0.1332034745372662,1.3660623167774764,-0.3160471938457196,-0.9795433814121149,1.6286191751775203
0.7159915969578562,-2.154312495756824,1.4432248828713297,0.7914267024184093,-0.42535607057699404
-0.18855659572237102,0.6560258596721168,0.5582666812917658,-1.744549672343694,0.11287032152945718
0.14741572963401278,-1.3479380752161119,0.18084849555055332,-1.895456357253583,1.0372502775035806
0.45584402239233973,-0.5323174439919804,0.0007315147367546254,-0.4861138101589612,-0.9767919070913744
-0.5212852959180178,-1.7179178219830904,1.0162358533603764,-0.9302392303667889,-0.5665637804912974
-0.35182832932203695,0.09553076613918157,-2.137620433753524,-1.3219111259993612,-0.7746108391623913
0.5533707311701894,0.8356162233775959,-0.35808396985526925,0.7776785621013507,0.053503601880296466
-0.44556160902342345,-0.08900484665280047,-0.7711544438953242,-0.7528697030143713,0.027874227796641962
0.06416130741187391,-0.24384516429598604,-2.7616247833865035,0.40522798140029465,0.08576807592089301
0.0635011464718255,-0.04809003095881224,-1.7247556091554488,-0.27741781960704204,0.0970254777364967
0.387509012579921,1.9016559011456882,-0.11004509022065093,-1.1081068824422189,0.7704560338082095
0.3804904287715686,0.7908595003433018,0.8560086168391253,-1.823481173416279,1.3954335077596478
0.05717631686718895,0.7797136501783207,-1.5452770072780426,1.0097646059726983,-1.072504867843125
2.9062518288507144,0.47164430484232234,-0.6543045096114248,0.21831527549329272,-0.39167237457494375
1.0208231918029171,1.406779846081281,0.001388300602134185,3.0859409084012466,-0.9413150586449033
-1.2345594038189607,-1.9917850081884416,1.4580927121045868,0.19177967802656257,-1.1808300708137989
0.6121478880802756,-1.2727167607313932,-1.0687686133871022,0.7514336233387892,-1.1299061440032954
-0.39738277117441206,0.8844082260726792,-0.0665727156592217,0.42634749319659593,-0.006133097268795518
0.04764869289727288,-1.4838392831804879,-0.677679088096116,-0.4812604203187894,-0.4598488524481581
0.5885175914643208,2.5124118206968316,0.4426859998912335,-1.086493684642523,-2.0688157843114445
0.31575089658874694,-1.3230931442192553,-0.4797972460940811,0.9815513152885134,0.05785996770568384
0.4227693384411941,0.20132528410360964,-0.3594388493589211,0.3458128061701954,1.04043994864073
-0.16920171982645968,0.18862898806202666,1.872694862807236,-1.875219353210711,0.9374607948010993
1.2820119981556977,0.31181496264043274,0.34899561434822923,0.41376972127056266,1.230392416833634
-0.591854929342545,-1.4528944550867986,1.4266670614121932,-0.4578770612266811,-0.8424473830681601
0.30923333054902563,1.552859718292847,-0.6286686758391384,-0.021368678351497507,1.5765421151019248
-0.3395016483715016,1.330008891627248,-0.5192871596773411,0.17085937857182212,-0.09292122937409752
-1.155345850863213,-0.4121113326393935,1.5434137289884649,0.6374150133641487,0.22710598523536402
-0.018982597155370044,-0.7280808602952414,-0.1251440763242837,0.36678214894831124,-1.6124505484749359
-0.29916413291977906,-0.7773876953059101,-0.6847382948087385,-0.30976731665030105,0.7222426563408233
2.5477671822543813,-0.49957727532483615,1.2789342405145954,0.9081278378329338,-0.42311590122891857
-0.05363598602073026,0.9734916958295743,-0.2649263008590644,0.1250038460065673,-0.6471577609542164
-0.7756252508281045,-0.2850263599393557,-1.114777744949091,-0.140794227170927,-0.048158654133624496
0.8678173331257263,0.26925175918514976,-0.821748873711279,0.7104860985437309,0.08982185506921178
-0.5623855025043605,0.253359893288216,-0.5085412932830913,0.11557149621933412,-0.3767941044304855
0.6414047521036188,-0.07671583751029475,-0.09687875261723303,1.78150752984833,0.5569684478269702
0.17349814819349393,-0.8068271230137486,-0.9830014454351658,0.35679547242545795,-0.34643412568371523
1.1522745144559405,0.2505971585676581,-0.10607637706054114,2.576449666258462,-1.8698777389328147
0.5774930566423383,1.7643328875425428,1.4718696217405765,-0.616906674766549,1.9315788910438176
1.3430851250096725,-0.13458236871506893,0.3343864916178476,-0.8411911314652355,-1.2999819194906495
0.9793076925591704,1.1764117929224336,0.617864294014802,0.15235256488741977,0.9020163427610998
0.09229818053929109,0.2706633353768243,-0.5163826924956926,-0.972890326362451,0.08193079470387234
0.5574778914847136,1.4744421049814245,-1.4701673692744026,-0.5918501470843807,-0.18944444631258792
0.4363244064213065,-2.5741050039429205,1.730057562324467,1.2232602184918395,2.6847147618809983
0.12007611643154557,-1.2539990181721343,0.9873043645607565,0.20524313758408708,0.9105672386882433
-1.033620231039924,0.434299899457976,-0.07920398147262503,1.636434520073302,-0.3535422632653529
-0.2863505122541325,-0.16884171970155734,-0.565326712868601,0.11084833415205501,-0.14047169729451534
-1.2967976494062572,0.956347262154432,-1.0239093568601345,0.17681566003840435,0.3933250291476007
0.47256757932654336,-0.7734443633788033,0.12356365347671648,1.5197636576641573,-0.5767045209511833
-1.148606929332205,1.4637086720411525,-0.1213316218185787,1.065583941971467,-0.8771242736220218
0.41641108319290315,0.44860410318107113,-0.048211702440403036,1.1905810954795344,-0.19238478347810678
0.7424723314468349,-1.420559956547911,1.5323888238988668,0.10019814169634132,0.10215956921190829
0.581515189010985,-0.7277384739163638,-3.3165521011465042,-1.579383445662523,1.6068825040799106
1.0850679685093565,0.7711583048461833,1.6629317495278817,-0.5095367333414674,-2.2388669077753747
-0.29092838782431946,0.6391593114851497,-0.33432115945408836,0.269543359393367,-0.4874126947616235
0.8723546778057581,2.1601811747960777,0.4953534757622887,-0.5198760469403562,0.08290401912583842
0.15298172877873645,0.0437691557314312,0.6957684754307119,-0.6800328162006699,-0.9346065251727564
1.955391104863802,-1.572177308321251,0.32458842252740494,-1.0133722191710581,1.5353127390655184
0.26397235470894187,-0.2989947866968218,0.224425304827347,2.0953026424136256,1.2454293652205068
0.4115616386657295,0.15896419225303193,0.22917338457844957,-1.9834895210482426,0.7887559733981097
0.6147128246815649,0.3161276713305975,-0.8860209158117148,0.028104920218541204,-0.010455993079803213
1.3324797751116897,-0.777664612018841,-0.021875935834057496,0.2388228177200814,0.6170349984126887
-0.39957620810890276,-1.1626015265286616,1.5076244110750177,-0.15033950591377993,-0.5190117263158431
-1.3364237005937412,-0.28335577993544186,0.5093334708879959,0.7163944824012667,1.2150100965802109
-1.1408522185820493,-1.0358735104396846,0.39812765863853866,1.5880103329065205,1.532622396104565
-1.5888130591146163,-0.6707018872608099,0.21582982833107697,0.268367484761729,-0.7416509948521115
0.6445388857614835,-0.7539810229755466,-1.3869221447688962,0.27483551947493473,-0.4658838065462013
-0.1359886780754902,1.5984659952677127,0.6537730773052766,0.10012277702035813,1.1664538665336694
-0.13157611091427243,-0.9940179713910133,0.6483466068345626,-0.14118963849426783,1.8003783594814249
-0.5131103677243293,-0.882724466227729,1.272044646410713,1.3119208996745926,-0.7325893160972053
1.6043670775038483,-1.5171778872226895,-0.586846236704973,-0.32368142723015764,1.8984908781195968
1.129403299250998,1.027636560047208,0.03999675011260818,-0.20991465396649475,0.18940069495022566
1.090079248187725,1.1451567973876813,0.7098029718864286,0.37014611963757627,0.9003951727880212
-0.20382011212986126,-1.0013281266864,0.6941398110261092,0.5923306306786431,-1.1586342686651054
0.6713028312385804,-1.702155296233458,0.09346939000530925,0.266759916075978,0.18881661125461774
0.4976499251887532,1.2560197217525233,0.48870203849226934,0.6719299968112152,-1.2031582306788797
0.0067267380998001745,1.795024748664048,0.6524749864699947,0.8585150500413619,0.4328833678863574
-0.501059860672196,0.7415463470005407,-0.6601904976754451,1.5610127693213343,-1.4735861773213939
-0.4602165157719012,-0.9845066877826308,-2.1904024510899647,-2.1632601798434052,0.5721765728969126
-1.4896456973913723,0.8358752643327161,0.6121034972357376,0.6889395250104001,-0.8355063013044022
0.2877600008877757,1.295532631478906,0.092548571667936,-0.22095420301122762,0.9008827560105304
-1.4948730733686528,-1.3251199052098166,-2.2644998417958715,-1.2536462949610727,1.351902181491009
0.9253521621610589,-2.3752426139796294,0.21419521165055155,-0.6596610231240938,1.5364265582377366
1.7160965869652014,-0.12669631297123465,-0.12976990575267203,-0.5441430126573891,0.022107123667453103
-1.74278894495945,-0.8304736493979467,1.4401445581814856,0.6336774719721933,0.2105724106342917
0.11001968794231595,-0.17553032364700943,-0.5487668279914991,-0.5710351102264353,1.0632840063861595
-1.4070012490817998,0.43766069207587693,-0.9225085421326219,0.554126804594924,-0.47291460981118616
0.5715009263123133,-1.0647296963249893,1.8799788821102288,-0.8146152958892495,1.3883870271254712
0.22911113533621313,-0.342010386624812,-0.8129338501889307,-0.5674974176494796,-0.05782964428485829
0.6858096640155203,-0.26442385165900817,-1.1934546555996788,-1.205626835837861,0.3919824699589831
-0.938338233023313,-0.45799550016043994,0.7190221723609617,-0.9406531056499303,-0.3759245189146379
-0.06258166053153592,-0.6373040727050578,-0.7104638991130663,-1.2380995341859866,-0.17031414931055552
0.5156453442273947,1.0646142380875188,0.8881531100504962,-0.4826287335511207,-0.8491408334704562
-0.5135870641501433,-0.21113522187917885,-1.3493613889653024,-0.44920698539967774,-1.0733832664112517
0.6252219830159279,-0.10712569906541568,0.13412836628056277,-0.22953918230335052,0.41175415352621464
-1.7528477996357315,-0.1889940508871067,-0.29490834490542017,0.3444603460823914,1.4854919390108872
-0.2794621527077697,0.3305936265715814,-0.19807902746861145,-1.1729865741467997,2.816175251339456
-0.4170478779932538,0.9687536322999858,-0.0421647241318176,-0.36597188159181426,-0.010106505820656118
0.42618035180105596,-1.532243210905206,0.8637867700742026,-0.18400242190969365,-1.9630437787404507
-0.5831300135307352,0.6451082148171002,-0.36456211183219805,0.6809680885234967,-0.37526713028208186
-1.6184438998211386,0.27429737791830755,-0.2469362890631292,-0.42399516276679566,0.0033467574547850184
0.7999081847046533,-0.798106718707638,-0.6858379982370805,1.2454550381068088,-0.21393737576123692
-1.7155079503186978,-0.7999193894359431,0.500563987188435,0.6424328046781537,1.4560196403641728
0.1215417601116528,-0.9533767485540359,-0.4204043330987687,0.10799467301714878,-0.33828935606760857
-1.5620523863860434,-2.8521393548492764,0.15158327374227373,0.47896441063199197,0.8005937271572681
-1.3390018183894281,0.3472771035771977,-1.052986999140229,-1.464880713588399,0.6716362866080904
2.0816239403057186,-0.33563746491496216,-0.5273699310597235,0.5899235059271652,-1.2046999474800948
-1.5239101026138127,0.29204913768950436,0.463209162447891,-0.6307233546503578,1.4680309151177202
0.2145910942040142,-0.4180632006320086,-0.45452479446685995,-0.13315996679047254,-0.6322972458236302
0.332427046612209,-1.873208435421374,-1.4256299272224013,-0.18108040737587727,1.5916841526199323
0.21135980338745763,-0.3745212826353729,0.3692473021096186,-1.6575037687897558,-0.2882873772203836
1.0298129538430256,-0.37096616673481086,0.1763567796754935,0.2246335658538048,-1.3859597746905261
-1.0082233423528812,-1.651961051021775,-0.6358824350091572,-0.8308845273496163,0.04527054911639272
1.4837407018608446,-0.056209596516220074,0.27050090634136825,0.48150414375402467,-0.5838882245050707
-0.8528770053262787,-0.5637257542764887,-0.19040481550571103,-1.4929924291218544,-0.3181180189414428
1.6662700797543806,1.079000751598601,0.03791543498899848,1.5315295710266068,-1.5192480009845668
-1.0074609621730795,-0.07152955837362888,1.1099753147828966,0.7394887066042283,-0.27616459180800706
1.800604259183733,-1.6149382847737481,-0.07723377154583494,-0.8820026759548691,1.3173350423709573
-0.8423506938148912,1.4798642462381293,-0.9595037758664593,-0.07043552423593237,-2.0311825534708845
1.5261042418250623,-0.8359857607307126,-0.8564277573091754,0.1947359547939336,-0.525810960534927
-0.229672422714586,-1.4558263596813419,-0.3561782023399575,-0.9492523718831921,-0.6352136221283129
-0.4402924148699841,0.3551623070734736,-1.651026067640316,2.412284283760784,0.9546068061663415
2.3772686994395107,-1.0995699213826804,1.1015811043967938,0.07272992817121857,-0.12667581645528295
-0.8124722666504361,-1.068403686051149,0.3388447067780429,-1.3622626567101581,-0.8534706433197007
0.4140609984442924,1.1168629804482373,0.71820297552776,1.5391931552565863,0.8874333510199024
-0.5691949523437503,-0.3969460395842455,0.13151782647442106,-0.4722017187760886,1.5570143042493192
0.18338272341956566,0.23383442901659535,-0.8885586665814618,0.015213352382993805,0.3085455258858125
-1.7169727598323292,0.569917172498342,-0.13959055962787192,-2.0070548164044,-0.9242039463006175
0.986726406879514,1.015598789805609,0.9499878962729418,-1.0610940151577235,0.25160427115831113
-0.7523039424797918,-1.167883126583563,0.12562393017876047,0.3665671443422648,0.23182549245596012
-0.16377828068577646,1.4708076601006832,-0.145370848084072,0.4673459333045453,0.2667832234455105
-2.854403042415159,1.5423304950140062,-1.0200996471862362,0.7874114301474174,0.4679459437640737
-0.23461482705877798,0.9046219004051466,-0.2069807831056012,0.26578338652084854,0.09295055050001298
-1.6850599171408387,-0.10163819455210164,-0.693917010770842,1.112899460412112,1.3006762059034966
2.268856188488056,-0.18518716731104906,0.6785563542398316,-2.632975970359972,1.3026819059872345
2.267161853275179,0.4700600751540219,0.30520306324804247,2.354775402785696,0.5008124999956867
2.2685848033533893,-1.2942067681824492,0.9088817569506394,-1.5636626874074253,0.8877979287579395
0.5145230932279814,0.9700784579636365,-0.37198076664645185,1.7775368143756083,-0.42294651726786375
1.1983770529974382,1.2113567081026122,0.48464445994829897,0.06237456673600009,0.5076701960539032
0.36436668823722757,0.5716366440921106,-0.3185393982257509,1.0809739100814117,0.611271054489776
-0.9677119522443366,1.1398644611641997,-0.9340934311622906,0.8072255086314971,-0.29533018655269183
3.664382779224024,0.36880263036235283,0.1141054206534807,-0.8960726977475765,2.2817473757595415
-1.259698974559529,-0.2861492019195838,-0.40282486074562196,-0.02549271138447534,-1.2321291700201646
-1.1000875449480991,0.5333475712912414,2.2914275370408723,-0.2029008081016139,1.9723099195069473
-2.8998767779672683,-0.9413095178645979,1.9027255391593183,2.428793059673208,-0.15311263513603246
-0.9364041411644434,-2.032766423999462,-0.7350894374596536,-0.4853288835680488,0.2605652586753653
0.8008146179286312,-0.9671843195545995,2.1322508780724694,-0.2453577969520207,2.4079772201729326
-0.7601774000573395,-0.37056163672318426,-1.6015702539322212,-0.4429135953339246,0.6005386277319126
0.33778028113173064,-1.2858496594258502,-0.6242685749211936,1.0463383179100338,-0.016384414800369645
-1.4601670135070393,0.4605970321972352,-0.7794494105929249,-0.09742964350767522,-0.6261613147321472
-2.299934966709953,0.30869202412545665,0.19433235094132637,0.91854199097178,0.45105584122315934
2.0237378005989113,1.7529537493981444,0.9017001930005273,0.061926618936437125,-0.5799855758025578
0.717122973414353,0.11159169206646768,0.03951093238641421,-0.3789586670640736,-0.3236591253994604
-0.5078684962189668,-0.5448643510920909,0.6474659320231063,0.9600012544346831,-0.0872088378172026
1.5421305399298455,-0.348815272687749,-0.00329033064338271,-0.9585084530881295,-0.22279817117555661
-1.07903066937137,-0.4301095199569797,-0.1870586513730641,-0.15945634152725077,0.09305378017937369
0.440929587655483,-0.5085638514755255,0.35235470295079896,0.41604678321028665,-1.5243911842046762
-0.38595730783800786,-0.7085208729637826,-0.2390601474504664,-0.9929016301065501,-0.19676383325799376
-0.456906389314444,-0.37133725918854094,0.38292630228702057,0.8013188095717655,-1.2839665369956306
0.6151994756560557,0.3799049679569031,0.647496838254968,0.5541787848875149,-0.7219088694348785
1.8176919099098,-0.30229766617694526,0.8609421606162251,-0.2987242316015869,-0.22178002422550044
0.042156160607923564,-0.5729176422506216,-0.46881155198479213,1.904610800956972,-0.9172696538051175
0.0077462819883195025,-1.3302490742630475,-2.4403031138123055,0.41763244716685577,2.081873836318342
0.16626931403949421,-1.4411698652778824,1.2691819957657295,-0.2765267478853167,-0.9107832767575728
-1.7129572226621086,0.44409732581735895,-1.5919240978565479,0.6464859418909379,-0.4893786592728836
0.37117006375756706,1.9338763012911808,-1.0302472199631934,0.10376610085525041,0.09759341190835732
-0.2951513955638328,-0.10067508500692664,-0.19589431928873888,-0.40227487210948054,2.1513967361147115
1.1702563137477955,-0.39890801982310553,-2.400757010934689,-0.9729677590396536,-0.11940764695962822
-0.47969730568429625,-0.4893679737751506,-0.09861586120604843,2.253042169047145,1.9334305267345067
-0.39752642340759686,0.9450587575025581,-0.9425568037588984,0.6431909210392157,1.1749933302256856
-1.1745577638575855,0.18649731935626115,-0.2694580221024486,-2.148755030175705,1.1468125983381974
0.799051829725733,-1.2306588735775177,2.351891725213943,0.815115276064649,0.2009976359917422
0.670650580706707,-0.5690493537438227,-0.33752000796936343,-0.019691421484350154,-0.6036378985902654
-0.5794068541087345,0.21300700447941906,0.842801870731334,0.6784812323959225,-0.364782050516428
1.0184424325813872,2.1919079481249266,1.1403404039204899,0.11003227598476607,0.8659725896116353
0.24930760281693212,0.7247278883595947,0.7704240935584882,-1.1306475669523348,-0.18631811702670564
1.7543787131957957,-0.35743111216666146,0.30182408241143,-0.6782571492257777,-0.6862296565851006
0.5943484817373673,0.5940789131189599,-0.6869583234271659,-1.4829990270029463,1.9521705227534103
-0.16805036348415686,-0.74225280577122,0.4559722293437188,0.01883682716480371,-1.6704778724870917
-0.6621476517185413,0.7823398992794073,-0.4495651912149221,-0.2608572243177073,-1.3511720545968424
-1.1122326171375267,-2.5007934892866657,0.02833666900538834,-0.4580019443947788,-0.10512120952071974
-0.9287953604840529,0.7954354713392979,-0.5579703553008218,-0.0156327938069746,-1.530669690386279
0.6980756558380857,0.8062387483413346,-1.2324799924929666,-0.6093882587593252,0.03411541931620551
-0.8312339925291803,0.6400636064260413,1.0322750471103155,1.076478452154165,-0.19892070825715327
-0.4449093369757242,2.5429972498723257,0.2804297334068226,0.27037570152814744,-1.4910593786606827
-0.2969615312118311,-1.738651882282191,0.06816913953062144,-0.003870896727788335,1.2355153756843058
-0.937002287297365,-0.9334974229152254,0.11456562765820218,-1.2382707244658433,0.19247754269033804
-0.7432446370706517,-0.7156525378836496,1.3158327839459931,0.08072107740636325,1.1514803063151113
-2.472733681305562,-0.07857675998824798,-0.7315697570818231,1.030615711589161,0.455083277521595
0.16576637832212104,2.6051602005274304,1.6332110792656247,0.3911415292270756,-1.3575665604130844
1.69605761940418,0.6213079501165362,-0.8671561770016466,0.1750017941686912,0.8843180167512651
-0.4478848380534496,-1.274676444484432,-0.08442299086762131,2.534295747213868,-0.8650792604651539
-0.2957110685199974,-0.33495123085802064,-2.8351755027933754,-0.021536958576265405,0.8022460957004247
0.7435130097142923,-0.3920794730994477,0.4946635932382706,-0.7840963374203854,-0.13238653089926114
0.591050584099981,-0.48429274955106444,-0.1283896414038974,0.9924564719750948,0.42491935635548334
0.567299751066203,2.1467070785199076,0.08831173752380822,0.10298351394432395,-1.358926221358403
1.5011010093396702,-0.6856490623550991,-2.2209222091457708,1.992783572700373,-0.2214331194316352
-0.23741113869853483,1.7826976504812724,3.2338324559095826,-1.7838503760289386,1.2484928725590283
-0.010996438887954393,1.4945350964349555,0.46819122023782656,-0.5102271241638476,0.9109298231536215
0.3659042627261738,-0.21681067688468206,0.5248827583705673,-1.399234167123267,-0.8291022976833152
2.2491653290752747,0.2736614136903904,-0.9051133971625971,-0.2714383981693369,0.30695934849489714
-0.0984724734577104,-0.26480951925416624,0.6205876903188433,-1.4287965506721658,0.8049525166571506
1.1607795686970974,1.666837282737901,-0.010880961592536178,0.5375898655779034,0.807069540416975
1.3457032886887448,-0.8566924347475182,-0.3324632713300583,0.2262518377451055,-0.9468373185414243
0.8102258371892989,1.5147392891032871,1.1193433791328737,-1.1800786517338984,-1.2650982195597884
0.20809419946633412,0.6287535498483733,0.3989621315003926,0.4639278638275737,1.436034137443055
0.8485931464596896,-0.5104390102371692,0.09294218896861842,-1.967267371767105,-0.1356752596886242
-0.919288247919133,1.4880331947930674,0.1860819008579799,-1.344483606550222,1.0863333706842813
0.17643079820725965,0.672468227396221,0.6571713064641372,-0.005803506670345223,-0.8883415685009933
-0.2772057106405516,0.7203856431087946,-0.6676483484820838,-1.071984149098061,-0.302793586556715
-1.3302966404156569,-1.344256419076503,0.6080727384029129,-0.6003503372241548,0.3290301279134863
1.915556179756369,1.9441885316799805,-0.39163135638228763,1.4853035274425515,0.7365072796034218
2.156092069982188,0.8837013958398046,-0.22135627479204203,-0.647927127057229,-1.5828631013898053
0.9893824548605746,1.8984449075574934,1.742139730148346,-0.7815804539874385,-0.3416872265077927
-0.3391496530730507,0.4333852253998928,1.616436433772322,-0.7793446382899027,-0.21603064955887014
-0.9859654237875412,0.7930938908600477,-0.039504371782893254,0.0934000573628616,0.7876213020351398
0.5336122495935163,0.28814740341271,1.391615544341288,-0.986201534964671,-0.6139874223362368
0.5279108703981007,1.0263426269372218,0.6857815649057967,-1.8355443312415385,0.1065260031448327
0.09629394259826018,0.11414600596947977,0.6604648338205699,0.8171664448098981,-1.2345203670603593
-0.0562830997792367,0.995853909675307,-0.2007571564903675,-0.45118248523466215,-0.9122516997648623
-0.44422382065899435,0.21117458294283892,-0.977441467351695,1.7104491626640177,-1.259405880668654
1.6668483005046888,0.4125362469998341,-0.09455216724735813,-1.022202745271021,0.9461903966444716
-0.19066857313431798,-0.1870913851797641,-0.4781221663397042,1.4534566250260175,-0.10753800373542183
-0.5501134552881132,0.43266608483411134,-0.8159671675783461,0.7712114401650146,1.76432894305339
0.2983227413713589,-0.42046890324996405,-0.3689557031250729,0.6956904874985107,1.507462895191877
0.8394467644955208,-0.4829200358295314,0.7614979232008077,0.6056439787430733,0.05436688635023863
0.3385663751967121,-0.20394009852341954,-0.27287871537374025,-1.150043654440567,-0.7831237756808511
0.27033279249059006,-0.3274168865719781,-0.5096703342700458,-0.15595120134838417,-0.00810186399526167
-0.06618013647210191,0.9619745055898848,2.198678162872916,0.47467712142443974,1.1807036324231939
-1.4100911404346694,1.4714133499394977,-0.09132870881763928,-0.5466386464436809,-0.7010195654341895
1.4417010105220038,-0.9968195013273518,0.7730911260961032,-1.3803175736737507,-0.19222660268140282
0.728145720325142,0.13728295243616592,0.663083268532493,1.1348844680494283,0.4750365970580185
-1.2241461433782967,-1.941245419345188,-0.6006719753314662,-0.07870957866098219,-0.04803282058611871
0.77050120593946,-0.8468695739954409,-0.22085319283747418,1.281916625377369,0.5273143516021777
0.8295350868903152,0.8449945380319281,-0.7987743962125948,-0.5330941091171993,0.3641804454220141
-1.338748463774929,1.2804341592215727,0.7962520853557262,0.22011077909450555,-1.151373157706216
-1.183378878810837,-0.48730883635247757,2.0834740524178135,-1.8193198289768255,0.25234386795185343
-0.7767280414906464,-0.7950216636353279,1.423144214195351,1.1672587599130992,0.63802041006962
1.2641875298786798,-0.11939545159627049,-1.4111063586915649,-0.03712202046797638,-0.8838970784877788
-0.568364783417417,1.1340315609018559,1.0946839821309862,0.8634398085185467,-0.7805414083758965
-0.5990518833145807,0.12315809692056134,1.4326562950933557,1.179849576370447,1.8694021753067334
0.9702835496910993,0.14747251618633125,0.1668129807170097,0.6571799569445911,0.2275698767355967
-0.2728859203794516,0.6140509991432148,-1.0846500097758258,-2.044625667794117,-0.03537099141208872
-0.8636037699289609,0.6373225637661996,0.02589851646789953,-0.6595344453493144,0.9408803506588362
1.1473266073622743,-0.4204342451690851,-1.7221937652346322,0.8031639453041524,-0.3415185897033306
2.325496955573183,-0.1997419415227902,-1.3857013585131555,-0.3629069443722957,0.6260005198671229
-0.7328912850004855,-0.8300186950570093,-0.9085650946491934,-0.46037694600832574,0.31782373402353653
-0.5904615151373106,0.32306962097000025,-0.4096308633292564,-0.7569539235247853,-0.1772492097590912
1.2717676972570326,-0.14625442347944184,-0.8129920775897788,1.9029953362092986,-0.3893222392346595
-0.690969061958795,1.4890203245485825,0.27068766746281286,-0.44109078294417525,2.289866652168469
0.8837628359483262,2.1132093066131956,-1.6334623334445613,-0.25203987970499325,1.7873231595289985
0.6536086266875547,-0.7193032464269603,-0.5202103682876261,-0.865253228394144,-0.1369647396405498
-0.2228910709256129,-2.243629490558398,0.04379431010452831,0.6774000815041069,-0.08674776312933542
-1.345829169487248,-0.522320385444747,1.2518130100892477,-1.1143746839320139,-0.07760477239626648
1.7879218370059184,0.11680253390064745,0.3153123193566241,0.6122124650505936,0.5153923459104517
1.352746444577023,0.4212707582071746,-0.5049634339969291,-0.37137591115649554,0.5980685714682616
1.16731874588911,-1.1258551120513884,-0.6394118394178455,-0.4171635739245464,0.13329696365685234
-0.2935191119048103,-1.7801519138465014,0.7841039724142312,-0.45063786659513855,-0.5968611622523762
0.06934106821714818,0.5488484302215381,-0.7300519532618286,-1.4440118106628967,1.584081677303021
0.5570234543405226,1.3602828357915842,0.9061421553466682,-1.6682325584406879,-1.9011924538412088
-0.3815782812459224,0.5330086831213334,0.5865857484945162,2.3551568402383234,0.3171843165550265
1.061070376287747,1.859675576115746,1.0250987490182655,-0.844757761775086,0.4021820492258321
-0.15527091739574134,-0.7539458683039914,-0.8569007210516674,-0.011222781612468816,0.9321572613880249
1.5210267751747872,-1.0765790405737878,1.2384516677033157,0.795028675874576,0.26907240168586966
1.422183159767695,-0.3585759388425618,-0.1621153103582819,1.1137457355675628,2.509056806443842
0.3229088604268542,-1.3585639029316967,-1.234290161654524,-1.3001106660652386,-3.112905470874204
0.869452523413432,-0.938340257018087,0.3917391250390664,-1.489840749359849,-1.3206283972978703
0.3382754531680507,0.8314576333819973,-0.7548428522679583,-0.901759246610061,-1.2137773525663382
-1.320814428519941,-1.1326484227317506,1.3793503595598815,-1.9111646076988718,0.33585196748750795
-2.7102869873568745,-0.367785133912178,-1.3145049430278215,0.010430989588212265,1.557419996964459
0.30771724018306174,-0.3644626243906255,-0.1194666273821367,-0.7239927238942966,-0.026364763710499364
0.28730284591448174,-1.0955610377821263,0.4675253823901605,0.6221021706907838,-0.6032009648155023
-2.125049955902238,0.4445256418532265,-0.1532267328311059,0.19110554751566539,0.19614724488267768
0.2207633554679639,0.711589303459055,-1.7685082103765946,-1.097127858688612,0.8839898855574103
0.6799650765534999,0.07824890094501047,0.05536407831903705,1.4346546799103013,-0.5413788653570129
0.46609364574861745,1.0455713074611297,0.13342862291584376,-0.5158733558948679,-0.8248059354178618
0.25075579768627876,-0.4521018522519778,2.045639597395016,-1.6537342545991942,-0.02999329172627201
0.19814086414517762,-0.4201531267231991,-1.2592541585366543,-1.5499153550576568,-0.24009048685493856
1.2350815615904571,-1.2157870056148266,-0.31200722994983343,-0.37764179083721255,-0.7741099828263228
-0.8757332735173006,0.9717994864928303,0.03812432482045304,-1.202991189228989,1.2164925270082176
-2.1388766216417237,0.3326957301762743,1.1119713254387609,0.45501628692888313,0.8356183039622219
-1.4351106276837884,-0.6007566114940174,0.4083754470042847,0.836154150975099,-0.9861472201476892
-0.8892201317728824,0.9022779862555795,-0.7911207170285046,1.0756224931058762,0.15608765843779213
-0.03250608393983046,0.9280378432088023,-0.5079294833114584,-0.2981208337097395,-0.9382237705411298
-0.4793298216919286,-1.4581763032343182,0.22711859997984363,1.3085929024051384,1.2447283870045285
0.9309069698082744,-1.3329636913403133,0.6493511973152324,2.0491800993661218,-1.375737732852573
-1.6623700123437033,-0.9858241005256159,-0.5295347949621639,1.6008296816465342,-0.9929947677425833
-0.32477471875985103,0.30127208301326974,-1.2026373858721366,-1.4598319777545086,2.5272614480744826
1.461852366981652,0.0867095438482338,0.07830067399100801,0.7333489940231148,1.1944744541507317
-0.3618498470289744,0.633172903659494,-0.6922678802913559,-0.6008685396209537,0.5086805107098055
-0.6934937744725104,-0.17922584888693177,-0.8005127785946992,-0.37308723340989075,0.2798247283004577
-1.4529225374157422,-0.41829046045346685,1.00621420766613,0.689614554234999,1.5933357223757927
1.557301829282106,1.4558864849814666,-2.999204527083898,-0.47216111217877027,-0.8933949077320682
0.933393467330404,0.8653162587631088,0.18539316856586896,-1.0208539020682401,1.6341078173149768
-0.46892532645845153,-0.7656436853071786,-0.21524579624343604,1.5803014034667453,-1.1899818032623393
0.7321331405159434,-1.4079626251970063,-1.3274635157323638,0.36998350578810313,-0.7301703108084496
0.8746814721792799,0.056101296273521284,-1.2285621566927944,-1.5152113538703518,0.8840792360064272
0.10419289637479325,-0.8334125773247372,-1.0672606033155478,1.4951855010929418,0.5591710446054285
0.49735370434741094,-0.964530282806604,-1.937745312304408,0.8029667592522468,0.2917294628188639
-0.24489547546734888,-0.5110219577306782,0.3764800067964241,0.2917030138435622,-2.404756707715142
0.09266987813041343,1.4466297884790613,0.6901147751649249,-0.37511522063659636,-1.022468763708578
0.0797701113412552,0.5657185804076162,1.031936140647203,-1.2934796905478525,-0.31691489374324366
-0.5641711720378118,-0.6485854152419122,0.5396913773704373,0.14188463139064597,-0.5949151340348052
-0.7010194575809776,-0.006416255365753062,-0.7083490080535583,-0.40897400261957423,-0.5789374856759668
0.36737621937469644,-0.38103908782167634,1.097141113305788,-1.1042864064209514,-0.6967353118662665
1.8539537389639356,0.21032058667255696,0.5933119067407532,-1.248697432064445,0.156240113721066
0.49509900171207216,-0.5004297184825358,-1.697690900638293,1.0879404453745511,-0.6809302687218536
-0.5462424819366618,1.0636951039721416,-0.3605679660548143,-0.0019043709850198967,0.6036314206244092
-0.5755844752178574,0.040533357725725486,0.7901291894147684,1.3898646095622518,0.40623953444462896
0.7868906132431724,0.6059007063355587,-0.6892446542853743,0.27331772978073315,1.3314761855568236
0.597183307347324,1.180026280857843,0.2456825681647662,-0.4880525126652885,0.8713506774766641
-0.1788608568444374,0.9120652362689289,0.14284127140920577,0.28858032293938496,-0.9829846790191563
1.6975700365147146,0.30284707821549156,-0.8178078136674402,-1.4899088748946712,-0.9948614451579135
-0.1612760441426801,-0.10164991338116305,-0.08065587483530041,0.8392426976647348,0.047060892665434007
-1.4858627116312655,1.4832533653233397,-0.18229755883708443,-0.9175358652249989,0.015936817776695165
1.6453105721077854,-0.2766936177518231,-0.4836407000508438,-1.412937050909733,0.12997421777323714
0.0547988238957483,-0.8027697112363106,-0.1093453136251392,1.0766920040130712,0.6354906718663058
-0.29366392965919935,-1.1708161086992368,1.0159753396459748,0.2745280841102553,-0.394019017901227
0.8065266789622559,-1.0646971849730456,0.07585754845540786,1.0076274442443318,1.046185388716427
0.1342073372803411,0.24371839104549642,-1.2924814219826266,-1.764914802281839,1.1966794973488835
0.00889129132935682,-0.49580446236952824,0.1318258437109485,-0.9929989178348669,-1.334532409793724
0.717882298752607,0.46116029181205265,0.04048676315291508,-0.9874541225288694,-2.00231751331525
-0.436606412277158,-0.28635836668494763,-0.0006225111243449027,0.054408768841639844,0.1186114351903485
-1.5458590981487985,1.3611885622988318,2.0826542145378273,-0.574568357503919,-1.266035502991801
-0.00580072822920588,-0.4303060987526579,0.7853752078855512,-0.9672540114831472,0.2516815683680449
-1.4620867844780219,0.2779668944162463,1.050707515935328,1.3608613183247737,-0.22464125765499532
0.9981610767426742,-0.20227440564343133,0.9269754238904787,0.5195800668916722,0.827443479487194
-0.2446242983578778,-1.128643300705348,1.0279738309185236,-0.2655923378191001,-1.2444406082317871
-0.0022738324480915824,-1.8056131185591455,1.0374003590951415,-1.5394409451798245,0.36911400823409013
-0.8521832817766116,2.831412636126939,0.010785686964700367,1.532045453373706,1.3666216698370317
0.7078730453818681,-0.7927686802057259,-1.0160466989807007,-0.08840555493677739,-0.5892094325802941
1.0149547820143576,0.8764111915777966,1.4728068990406784,-1.1085787639089726,1.2654509850934248
-0.32481225839099703,2.2988888166587875,-0.8099241973300008,-0.5383883216358755,0.9112753958484885
1.3282861717482022,-0.04578253039360124,0.7107956736231927,-0.12946433021543802,0.05370201799748987
-1.5152385664523833,0.20472550898603614,1.2233422765341606,0.36753945277757927,0.5502441312471384
-0.7050589210577398,-0.28929468673034514,-1.567051138883843,-1.3759828094360882,-2.145258966929627
-1.4636541094024709,-0.5139445602498205,-1.0882640582201824,0.3569018037067767,-0.3314580853213008
0.1260274232041025,1.7449201567005923,-0.471269996525828,-1.878626429028945,0.16775456413288417
1.9472461168223656,0.2554159330835947,-0.538384629357065,0.7182174082304287,-0.8591069440924797
-0.6572834117175614,-0.7287624977627274,1.8401458140419433,2.2887042857881568,-0.605505517227536
0.3870971466294993,1.7687723245364833,-0.4363977975377755,-0.7687663572149404,0.18965565420879021
0.42814894445566754,0.3373322421238139,0.4341238741724789,1.2895241503831738,0.6113030938307864
1.84122241645766,-0.676182743704939,0.029050709809126395,-1.218198649926405,-1.4970720494818475
-0.04896129421399138,0.7350003962913678,0.38345213450897836,0.79761540893734,0.8580515920333027
-0.7747052349578705,0.47029806688453585,0.8880132496622687,-0.9135232392972968,-0.7224028611569409
-1.4266760438262143,-0.7789017773499471,-0.23466698650562823,0.31834761415394314,-1.7892032670773845
-0.035216341364869105,-1.4866161469778933,1.819866898010304,1.7200864010704267,-0.4861718866373889
0.23412807942553943,0.40934076727077895,0.8852793660836872,-0.1392617020703315,0.2584730294740723
-0.8852612343991806,0.5890735084939889,-0.43827427311501604,-0.21863266545954363,-0.3289054691199657
-1.3255703253846132,1.825455604241404,0.5104896665533579,-0.29635098267685334,0.11172873886094026
-0.703149078973588,-0.19155198314983396,0.02182353619704357,-0.04648981633548332,1.2222651532446744
2.184874901710487,2.2798943508952236,0.6362130220630182,0.28515539737498313,1.488647750171621
0.771011014810487,-0.4981104948178249,-2.0068802457452493,-1.28793322388327,-0.44477582114903325
1.205278570787435,-0.8638608017070624,0.6542651382288626,-0.4088707878442595,-1.0819308149304627
-0.8336881156447407,-1.9419450887699035,-0.8248963586912094,0.023560937542024988,0.16001738072929125
0.28626847576872305,-0.2828634697531584,-0.9000596194042639,-0.15268178490719553,0.8823290836466464
-0.1399097052196736,-0.6496674185330132,-2.3745307495871564,0.5325218972087866,0.19584148917456012
1.8298622976284644,1.115273180245058,-0.975214717053966,1.0750818447764003,-1.5400137459785748
2.6131980941291912,-0.32541149893981874,1.372527303373983,-0.49761298596318804,0.9547008562202521
0.010164937439181423,-0.7888813627771426,-0.2108531058520518,0.32868635915003547,-2.6960201060487283
1.971536239843046,0.0994716481247531,1.4244925696341646,-0.2662781110150085,-0.7504704703722805
0.5673031511223084,-1.0867179008814765,0.37221903508086457,-1.570517520338032,-1.3924618136739686
-0.10939406101934471,-0.8520514619321123,0.0002693150861267816,-1.0952587332853532,-0.20849783499364566
0.11235262526182672,0.06589689502330598,0.713536217216975,-0.034406987405681926,-1.054838534681739
-0.3338349536930667,-0.8174906638406008,0.5951637557863105,0.22156844428770217,-1.2092553306261358
0.4158446385546248,0.3556516612712472,-2.063029753705192,-0.0073399491598329985,-1.8156600028750343
-0.03268353359295608,-1.3094372321911463,-0.3996011039845573,-1.5296800524445973,-0.578193922042772
-0.3861751014201558,-0.5122968745471274,-0.10705117608341393,-1.891524469107405,0.04596371083201015
0.3162808846061576,1.06913409135888,-0.5933564870948052,0.11179874584024066,-0.9395075587344409
-1.807391328265619,0.3303179145665536,0.008193536372355112,-0.3335521784310708,-0.6952468220187727
-0.8749114919480698,2.0449950945136197,1.682866842397157,0.8047065829045109,0.15424463608188566
-0.5766746945688541,-1.9993231212483378,1.928568452251629,-0.5529201363125994,-0.8581034336573601
-1.305346044552636,1.622382475783028,0.5107058616780465,-1.194733808900091,0.9912601346352344
-0.7513586019604043,-0.5569202144949513,0.25110707728698073,0.46294005563584373,-0.5014807430425252
-1.3647939988300102,-1.9932316139843,-1.3389574937228403,0.14391922918804154,-1.4822057969867986
0.20507028100833932,-0.696886156609509,-0.21258429165406514,2.1420946978737434,0.4077817737665678
-1.5383774745514558,0.7317417406350917,0.2813086817411673,-0.33393246212849304,-1.255773459308323
0.0760501046096696,1.3441012243843709,-0.7617268745196598,-1.7089926399380728,-0.13344409159084497
-0.11279294715901067,0.6067749605772771,0.08106280658423379,0.028303837361641507,0.6135047459021462
1.1731019396515479,-1.077099176307197,-0.45187229522257755,1.2815395735461779,-0.8099936715855194
0.8923102226443496,-1.225780492246937,-0.7031517877620984,-1.1994335293205212,-0.22256755335544695
-2.620362482269696,-1.0360724079088797,0.456563337757618,-1.0636269509197476,-0.3315148682398312
-0.08913814318830701,-0.7503319565913883,-0.07465601123808681,-0.8362670806273638,0.6181039653424097
-0.9889020376297457,-1.4962808490714399,-0.6012813674706625,-0.5448337119674893,0.3789292215231901
-1.2682430367626216,-1.3747576082787192,2.263627392861225,0.12469746303452528,-0.6678477154436204
-0.4618817381739398,-1.4295385527878857,-0.4233832138728019,1.893007116107891,0.9545004397636467
0.22717792870387915,-0.47363932200397535,1.3729321374299184,-0.670563079166575,2.4536565522059695
0.5029610207396663,0.06970818356983018,-0.33499957988090495,-0.6333056018962246,0.04370708568948441
-0.07790205322494312,0.5003283361600862,-0.8794800153715188,0.7204450596254506,-1.8721066542702616
-0.8571341506736105,-1.0567761157994902,0.32550976498219925,0.8739810030701202,-1.293495545364079
-0.8681506743563735,0.3308722464507279,0.6403552717903014,1.044997270706241,-0.27398275081966783
-2.763745420074169,-0.09324528159283618,-0.04785718186913704,0.7129522414367848,-0.6218365722716894
-1.99184911001784,-0.30223444478230344,-0.6420504730868161,1.319167501729177,-0.57876214158788
-0.9110341795087379,1.4112165969216037,1.417123411984058,0.40177684016463655,0.11063449850949962
0.6716504756239049,-0.20868019597956977,-1.9441770342492883,-1.1408959334638553,0.3576185831423597
0.4061978186147687,-0.3101056365271119,0.6037574025427349,-0.33566222093617304,0.18222562990023616
2.171058041132001,-0.6053838204079632,-0.5653347489502146,-0.11027346980002332,1.154226021733674
-0.48227094864047354,-1.0266855900961942,-0.06338980283136554,0.7336279480306509,-0.8935896066915201
0.9382882496410587,0.9096590209402391,-0.0005696170927403347,0.8621422772819891,0.6869678459688041
-0.8262997567985293,0.33647872963850406,1.4105379260520787,-0.7523890851213861,-1.173541766480835
-1.080692493804818,0.425923481606447,1.4474823845025793,-0.1122295709970408,1.069571480314997
0.8406599246609231,-0.4323362054750649,-1.112465706998528,0.10254230716954364,1.144337781151493
0.8142450647220107,0.8327113367938455,-0.008896453299185371,0.29853341710093145,-1.9536183594559071
-1.247823567513739,0.2604476744902877,0.9018642175227793,0.46506089656011,-0.6448510530333175
-1.5826452251632297,0.14487902398610786,0.10868117381464394,0.13146181722111247,4.4216076879074615
-0.34786135032072507,-0.45901200233081985,-0.48685829278045256,0.18305653516915668,-0.10287255538890942
-0.6952320028882047,-0.02149156880464196,0.09940478081200088,-0.23529319520430877,0.6356080348083779
-0.2885474105278091,-1.0253162428691236,0.9775905474259992,0.5979666563887491,0.5009450877615609
1.3911308866946024,0.25953268192488466,1.3039340386520335,0.19548033583016972,0.5715434920761404
-1.9732313897225175,-0.19447160321183868,1.1725560582405303,-0.10825805276209079,-1.6372839770462708
-0.3704875270387971,-1.0163623011401013,-0.8103037891399557,-0.3267060390893622,-1.3058071387076429
-1.1007974235658837,-0.6970118629055273,-1.498457710099744,1.0268135936460852,-0.578065025407548
0.429003043121811,0.05067263149934449,-0.668066243783539,1.085488970149136,-0.3353016718575155
-0.3831527371883042,-1.2915963655193743,1.5473219045463693,0.00869472657122746,1.011345107610939
-0.6514099000386351,0.6589909495239318,0.1455959750118425,-1.4028279528661693,-0.1773489877238925
-0.30280501373541036,-0.5324946635726984,-0.136535968502739,0.7438839442631993,0.6858499063749527
0.2583730290941695,1.0231059607884698,-0.034873775996634486,-2.205609132619555,0.6279611043877049
-1.3585263303059634,-2.271663315482056,0.03574747940342167,0.6119764951784156,1.0790788522982924
-1.7881965931329502,0.26114534809928774,0.28591859938825603,1.011877523156298,-0.05209721724712949
-0.627902512570045,-1.8802681912400585,1.5327445691182497,0.7354472505470712,-0.007774180643581588
0.031193008253478028,0.4465882149242095,0.4580985024239538,-0.4911287645628977,-0.7526259903314911
1.8662005973897433,0.15797739108300507,0.00823274203858496,-1.805460272488262,-0.9988024128638561
-0.5479671064591742,0.9373967336728491,2.0758979765846326,-0.5878452615407905,2.7354361751688563
0.7390913630762073,-0.2372046101518516,-1.0507914922210753,0.6306459005587188,0.3727038946639702
-1.0401705936744194,0.9802232148974781,1.4781453043470314,0.15016916919396522,2.0646931966052393
0.7814323965761105,2.1270864562547693,-0.33046005117574695,-1.5262130620097942,0.17168301507117836
-0.29262414747136195,-0.5433248407964963,1.2221806868183935,0.15988249999748777,0.822909815041133
-0.24652607167742988,0.7852079142406114,-0.18261076182761724,1.921761105352621,-0.6303721266248423
-1.8208017944567478,0.08876655669121242,-1.2081551028979873,-0.17551313055936085,-1.36575859533368
2.3108884746880407,0.7218183930341902,0.17791223740906925,0.8218641969487936,0.22878303650766565
-1.110706804148624,-0.2878658856244508,-0.17358157491902335,-1.1590699499795325,-0.8841803151897245
-1.3595483214853226,0.13641786930359487,0.8599374455407743,-1.092515184863099,0.1009758913426759
-0.15812781743146484,-1.31983724323861,0.36648964019338226,-0.4046914038716771,-0.24612564161345568
0.3536572440772191,0.522414696706731,-1.3379799153153424,-1.1407426142669626,-0.04153797216237773
0.6099751593421368,-0.7035098515750007,2.9744533798892543,0.015651997624680572,1.816892928643046
0.021007527634391163,0.4176426205171651,-0.13036507184733317,0.8883215180586185,-0.34077821702917865
-0.4063351364914069,-1.74634902315849,-1.8216561344481366,-0.5806972555776848,-1.1143137660895164
-1.68684791330634,-1.1540719886309478,-0.0740970460781358,2.3681612272683417,1.1828285522440949
0.934473395875469,-0.22945191299781098,-0.09397003105356139,-0.08565098623350946,0.7264011512928272
0.7260878579402814,0.9151946804911286,0.14521361216034231,0.9843374090892905,0.7113741792796404
-0.3888597731010442,-0.5364378376091513,-1.0511635909825645,-1.2910311147559648,0.4459594944778219
-1.5935167462222872,1.1625295458286158,-0.24401865906934628,0.6641315449033596,0.8904864746507392
1.4449778840409988,0.3766850334490999,-0.22170697421790647,0.39933607226029993,0.3709764682486683
0.9006154177726504,0.3188759394931649,-0.9620775277304485,0.38789356682554466,0.2174709126023095
0.8424952565998807,-0.9934653960520302,0.42452804302059627,0.37868707463270956,-0.5649287958654013
-1.1447530336228287,0.5887007218094692,-1.8942961909419158,-0.36271598579477127,0.9944094932555194
0.588775821050008,-0.9945631498447547,-0.2372383282010009,1.4099681616281987,-0.20044497591663246
-0.47656011469099463,-1.2414245426913957,-0.4417110438944809,0.09808882093517259,-0.4118507409675058
-1.4528679745216344,0.3843420139580129,-2.80042674021111,1.8620630980908834,-0.9000059124650535
-0.16260849890551035,-0.24473747336979593,-1.5067402780463917,-2.2149620401947847,-0.8213724462356597
-0.8491378669606586,-0.468627203394459,1.765333777602874,-0.831842165110892,-0.8707116010906184
-1.574992115444738,-0.35219193626033246,1.0154478905742927,-0.9459438210848056,-0.6775921170363511
-0.6313398353585405,-1.3392932983157044,2.5031389399281254,0.8677894915409726,0.43176699102346705
-0.6603067403705384,0.02175021739540944,-0.4095226029003549,-1.2567824568505714,-0.5145828818443615
0.8081043182370617,0.08207505065541632,-0.06337640706446077,-1.2022965253559534,0.255611990529399
1.9030861485206825,1.8473320935152302,0.8923292641468651,0.5431014143735119,-0.993204124706949
-1.6647175259952374,-0.48171291009200956,-0.1573743167347153,-2.926144919390847,-0.6464650838002471
0.10545128636982713,-1.0520122935842835,0.9346615595324128,0.2635770091284102,1.296498559929869
-1.0459583758340887,-0.14407772775632635,-0.1447068987290756,0.4085746783979354,-0.25050123053832896
0.4329756783871372,-0.8980266671511657,1.511702332055672,0.8939594509154718,-0.40841081803555684
0.9896093863897998,-0.5298107861206431,-1.9174647924387385,0.4553539060697844,1.9313960116837923
1.1815238275340485,0.0031712041687166803,-1.8879406600343906,-0.09513026999836457,0.6513734322087583
-0.06499136673669663,-0.3129853015754328,-0.46798394564196377,1.2746758532910345,0.7719848972162097
-0.30787172649718253,-0.4217109932106824,-1.076997203252422,0.0845871626040979,-0.4394050767177576
0.04051423496283017,0.1934719692635596,1.4415940758126635,-0.9140990477182432,-0.506549820698313
1.054448686028596,-1.3103152503351339,-0.5891958443511033,0.25427123697831217,-0.48278452712310593
0.04389649329866476,0.16097216506735154,2.439015750995338,0.3533286501628651,-1.2836827380186038
0.46969615790056823,-1.4524951687144496,-0.31015337698986417,-0.8999928794985971,0.7184852217151804
0.8058813413236589,0.8039544199990905,0.5042541418474672,-0.8141983124503136,-0.6519687397477612
-1.0297949030679665,-0.06904751627872555,-1.4786745647665618,1.2244340520520647,0.5743281346442061
-1.4782889123834395,0.9748785616327911,0.7686493604224904,-1.3006616448482577,1.872846698540607
1.4357753648834781,-0.09044650954971307,0.7114050181975675,1.0374983236426494,-2.2398370183348915
0.6503824346372971,-0.2480318815018992,1.0247952067044905,0.7028898900768379,2.441713984253647
-1.3278066339885655,-2.283634298237638,-0.4803544072953041,1.5713859909800172,0.9654924666785548
0.4185878331003473,0.6138487107108149,-0.4428561377352022,-1.3051247252707483,-0.9807653520964145
-0.8172313360101614,0.4423956405953189,0.5299485734712907,0.8036006600823211,-0.7166908914368587
1.6083478455127012,1.0408670917647178,1.5726962707110124,-0.0935888042699846,-1.7639610201632432
-0.3925018925388238,-0.14490149533506372,-0.2262651541276081,2.079245094319093,-0.6857912821143203
-0.15898419626244148,0.3807330281657612,-0.17064467017508625,-0.5709888047131382,-0.06359008167663975
0.29515018831315226,-2.0093684649953305,0.5301103793840898,-0.00223405106299929,0.29104120955902696
1.5961889422565676,0.37536472714984903,0.32111859356859745,2.457624678398316,1.051893290530197
-0.640012655688784,0.08530614847456788,-1.5275011763712394,2.368482312024948,0.20559253140593708
-1.088429601472991,-0.9586923363974518,0.8945939254333646,0.33573047113127447,0.016435091140851357
1.0719907949162606,0.7855531928507323,0.2716107565782759,0.2898319917138382,-0.6442185410052215
-0.4294040615375297,0.09383036997023467,-0.8411561876073724,-1.0783153150455531,-0.7916746646932661
-0.02300411414362912,-0.22481657476357525,2.2434636625398885,0.43272589123919053,0.9938868847405
0.858209607688125,-1.500242895605221,0.9382485044223703,1.0204107344436852,-2.2570318571492525
0.9087922555656645,0.48663795202401616,1.7213653683240435,-2.3040981031952192,0.6053185813063364
-0.8651837116477771,-0.1462753778072272,1.2998768398184544,1.4444106988038365,-0.3652205131394192
0.1509172454795824,-1.2466173918488415,-0.5336371215236051,-0.3763787541926712,0.8543119377341144
0.43351516366039694,0.7398692242340671,-2.051520495519831,-0.11255432280792645,0.486482535975181
0.7062611254243969,-0.8742952528525513,-0.949936966155092,1.026769432213734,-0.2206092457495945
-0.6206125249371559,0.6829705592068694,-1.4528974095302647,-0.7258828909508175,-1.0603272215784862
0.5786636030014494,1.193137200125679,0.24233098468524406,-0.20080412158115418,0.9684559952228317
-0.8499424538413313,0.4753967016842136,-0.5895554540803558,0.3155896754451835,-1.2458635511274396
-0.41986927347692604,0.3871283103993654,1.4062629716702801,-0.4738409616416006,0.09928227548877117
0.0034715166284262386,0.45997389576477504,1.0963266280732638,0.6100889063020974,-0.9541007208824103
-0.6673325565215965,0.9782713852606515,-0.18333948425487678,0.4652434633111331,-0.20349808653887752
0.7503552690500626,-1.5983013910238981,-0.5695406291180751,0.2378742074957018,0.904671040127883
-1.6785786661774258,-0.8778106992214841,0.8659814287054027,0.23732370644226347,0.2187316043542564
-1.0547505376347694,0.4136748586127536,-0.15634401357500116,1.7565030694365296,-1.6969633182710127
0.19649276015839937,-1.5940671538482811,-1.8083383508466462,0.4610031437047527,-0.0032898147830267015
0.237967147017859,-0.4440900550557659,1.4166907706392924,0.4779112357352891,1.205752250769168
-0.4359610213291542,-0.09150177395509425,-1.511459383912345,-0.5604113670035021,0.8538114352214805
0.02568309095796454,-0.3255363541351479,1.8507846575082438,-0.5631861849914944,-0.26028399048404705
0.36479963852060676,-0.22121871253756747,0.2901587886108492,1.7944416974538104,0.9633032621900012
-0.07938076588082868,0.15921547051800689,0.3978412596470603,1.7672140617280077,-1.4978245501003473
2.067121242616635,0.22164464162835387,-1.5898898045739402,-0.265407052865588,1.3619262766359523
0.5116389698752244,1.4277592335989402,0.9453635839767013,-0.7871240388907398,-1.5107537597504037
-1.8974767414255498,-2.4649693805480317,0.40078829297910223,0.1731373727773527,-1.8570973758949192
-0.7371860334657185,0.6286471958265211,-0.6333240359174982,1.6466448638824007,-1.9450741023307143
0.571436116554843,0.15093195570262166,-1.6961204816025812,-0.262570586848714,1.3178614107261843
-0.8745562571940033,-0.9378125692503686,0.5767974743664647,1.0786281889666003,0.20061604639904546
-0.2772291447661509,-0.5306046398689251,-0.30515518634579025,0.45166994724740317,0.9627809442720237
-0.02798961160137919,1.4112872614800185,0.36305848486323866,-0.5184367150008117,2.001703139029678
1.3113241974399705,-1.2081817145966431,-0.346293833652137,-2.492370632144328,-0.5090499368921971
1.720698955668194,-0.4463046813527015,-0.6828279540727801,0.7084530316434237,0.9105580724555211
-1.913720632122567,0.4181145303486173,0.856815640343505,0.786813184421125,-0.26567994569112785
0.8533859033008061,-0.8590599034884809,-0.5722981435112628,-1.016211796110211,-1.9163356363125836
-0.28569970342159856,-0.7727918002874167,-0.5026117641321115,0.23893198584534633,0.808216769084634
-1.0383229728996837,-0.7795332255937838,1.259875111947643,-0.39076968112734434,-0.09577623065991046
0.4200552679260001,0.903308300145915,0.5125973954140001,0.16543033107024482,-0.2930275256287584
-0.442408381899434,-0.9335696634193875,2.348800410694445,0.7222897063901165,-0.8875793991131006
-1.0934457408115894,0.06178798115336641,-1.6777715176611168,-0.028082223774256034,-1.416883830997839
0.8037141980035852,1.1710456218206198,1.2278215604072988,0.6056831106065811,-0.9457918332732815
-0.33543347048882843,0.9729422762875408,-1.4179204067055498,-1.515120193859299,0.9507948546563207
0.8220044768408795,0.6853517777803828,-0.19785537252560054,-1.1616202271011735,0.6589169384204957
-0.7826030992225482,-0.13509239178765942,-0.907228828807869,-0.7110144362027633,-0.56848175821037
-0.16659778157486607,-0.537696049576639,0.40490988360130215,1.210169562246559,-0.04985747151451531
-0.33787060469234415,-2.0746829432343183,0.8402698908576535,0.26478316933359186,-0.7294682731464011
0.7671103375077251,0.6358581939088639,-0.8462122302170751,0.287045690428282,-0.1675552797802041
-0.4661502143316829,0.9563146458045787,0.04634003538530794,0.5365470386892918,-1.5350411420791679
0.00452282086004923,0.7363802717515769,0.2286468497291386,-0.12335630394922165,1.7202001711166164
0.18591355503138193,-0.28682733467882915,-0.17957342796637668,0.8203647771887169,0.20745918996974536
-2.2192739665012464,-0.591622429825022,0.9947173044188892,1.0577906227686533,0.7576935935239587
-0.419443353684874,1.66042826364025,0.6105968335957871,-0.3740553632483247,0.3988906060770982
-0.6939815082987411,-0.21467399951622052,-0.145569194892816,0.0069552963751541505,-0.18443753586565564
1.1552366687595486,0.08197791944212027,1.364440667056011,-0.06739008728810408,-1.8156748552084758
-1.797403871419006,-0.7040619105943356,-1.4218397300451868,-0.3009901111061022,1.535151535484322
0.7676490801199806,-0.7623390458433489,-0.02021811088541465,-1.295066500983356,0.3716191265748296
-0.6535022269667119,0.20570855960219697,2.4794613205429386,0.6918725358217299,-1.532300040177501
-0.6385711106706586,1.425227987828909,1.187251410901813,-0.6858098791477848,0.8217006550124595
-0.2155753900984886,-1.477858757998855,-0.046849799817772625,-0.3855723013991447,1.2893951918213071
-0.09498159073412935,-0.9808924447043774,0.7118776345132305,-1.0011153887180413,0.7699363452674224
-0.6198596006218968,-1.5320141032244614,-1.0487853841619172,-0.4330045635952257,-0.5594295122333294
0.2080230619206172,0.11842201953482379,-1.0653570315142342,0.7925397770316028,-0.1399527620677195
1.0821881518725645,-0.16616702238377914,-0.14146369840012177,0.5795374568159694,-0.8422216880167817
0.24745619112158287,0.6199247776830135,1.8818477149323976,1.6217008329226354,-0.728548956509943
0.5264833272564425,-1.5739926386003749,0.9939241198301683,1.1664345290614433,0.8977948638821464
0.07262812596564606,1.8956779700022235,-0.12383236135129176,-0.9895538850356566,-0.32639611534089297
1.2540528457103843,-0.23420042996299767,-0.6410909118459545,-0.285894047210861,1.0953731425320463
-0.5436311860591251,0.5534546576642703,-0.7619241593044271,0.40565377950676806,0.49770771998380253
0.18958289814845403,0.7269703126865223,0.7346492364064092,1.184429153984629,-1.2314343644567642
-1.0705416543711335,1.1615225795583897,0.44193731106294765,0.6921690887417703,2.0836538897811363
-0.5232969218407966,1.6298892105086829,-0.059014932308993545,-0.6654379853701881,1.0566460067795564
-0.3410746178373859,0.31425935793416754,-0.1215321305483356,-0.5617893797999993,-1.0216414706937949
-1.0222865610953404,0.07616541176783805,-0.7008495853337849,2.185494324446881,-0.13916474510784566
-0.6548682271197476,1.1695145533359763,1.7659754656513327,-1.5117049646570608,-0.2906947095808304
-0.562410947595219,-0.001230837506453162,-1.9081500682788421,-0.9858960782902092,-1.4684384506922357
1.5241833349388396,-0.4510632905158034,-0.5990895742204388,0.23634835636200965,-2.4869164615947112
-1.165011068607852,0.7989688125877146,0.6252291371407874,-1.3450175786162297,0.5490300517853155
0.42051896264985195,0.6009199611273006,0.3345593160619451,-0.050467951829052185,0.29534576762853565
-1.124496029035641,0.2854802944025359,0.36534001226094737,0.5099242533568852,0.4962142461368522
0.11907141781534278,-1.5961790908848519,-0.8263589408850432,0.4897151316555919,1.4886169512641614
0.5514037356838744,0.5524387434920001,-1.3687797917310012,0.21187095716944487,-0.8779207931484161
0.3222952049775929,1.2822243381660379,0.8069047956390706,0.4816932818671006,2.45503602005408
0.3745652462483049,0.1930644616419632,-0.31025583121637973,-0.25530557630159423,0.8187893760181718
-0.3990794231667412,-1.459344952158818,0.9675848571270065,0.2555644139472794,-0.5467192499241463
-0.4430885289678897,0.5090378734246676,-0.20161138335760703,0.5868017576258437,0.3354892603895782
0.2897492385094189,0.33585508542959197,-0.3147333516362448,-1.5231925991993314,-0.2216989011701238
-1.1606913200029432,-1.5846423664423568,0.1823883947722728,-0.5303038291251312,0.3763945246808955
-1.0811980386501028,0.39689238403692206,1.6014561427634417,-1.1191851968510136,-1.7266509732214768
0.44661156918380884,-0.34141929430571927,-0.9165618567838781,0.5591301971886052,-0.07550597949591746
0.14689460875937232,-0.5406153886912075,-0.8237684960281477,-1.0539665733042176,0.21895259702109973
1.0087706685959705,-0.11168776373935539,0.8806022636580554,-0.9117106276638769,0.7471111154228806
-0.6739588741899192,-0.06574434125308794,-0.36891850775396867,-0.08688903310107905,0.2946066781974649
0.7833735498693064,1.0012231631694428,-1.265597213424515,-0.19199089912457665,0.652577474946213
-0.26731293206696016,-0.4659078103981388,0.5542200723264953,-0.04952881085555332,0.6000973805203299
-0.3533797586534072,0.4574185475495676,-0.700806710743534,0.677664621119773,0.268322072360065
0.7582130744406301,-0.08502412145197923,-1.4798701305164403,-0.38400034449532755,-0.2774393066083856
-1.2703299053685686,-0.07986823754571766,1.337091508023205,-1.0954672730603032,0.2278590963265127
-0.7731194062465087,-0.3369302234916658,-0.37884734547725407,-0.20428674271582414,-0.6306900425371764
0.7975401528677404,1.2449029704710413,-0.6840423116485691,-0.6443928954134991,-0.6749558786238291
-0.763671425965616,-0.35160450659081655,1.4256144786215281,-0.16248123830702993,0.04075106104513213
-0.4978227148763647,-0.8608048872406218,0.41084683149492623,0.5162029414135394,0.5738066902266346
-0.22496557438354944,-1.2667860480527908,0.5237994597811446,0.39679668782387306,-0.03699453832143875
0.34063859335504576,0.789566879284942,-0.13333403378643138,-1.5682510666991722,0.7082696345596308
0.3206211565875485,0.16246938403968234,1.764990259121106,0.1678393028114061,-0.4244467693244848
1.3169902704320984,-1.0276103515184198,0.458103512277773,1.48606792991241,-1.0372318124017628
-0.6607781267305853,-1.0514142750140898,0.7784315790440842,-0.5484397273417447,-0.18157058294475897
2.1350571260600066,0.23766955458536318,0.5081570743686843,-1.3694690752696825,1.5807200868780211
0.578738959159635,-1.0749326937071477,0.3339310303961263,-2.0633023781676116,-0.03879217813652124
-0.02416616826446882,-0.5719214321255547,2.1685260922586735,0.39806172761472636,-0.126497257814085
0.9334020907817975,0.5394266164143904,-0.8263806640570552,-0.41836010176936117,0.31914021760546896
-0.03291920877172609,-0.8439660333795123,0.05804203044960155,-0.39558353571402816,0.6172023671587162
1.4483065134351287,0.09550231907558113,0.05533834469548545,0.17060576589304968,0.40312792175341916
1.2047362302881803,0.28875223087918384,-1.8098268374661741,-1.6219541207376988,-0.32239117875137113
-1.3479851971889096,0.6807608323479825,-0.3978760904184457,0.2997728398997087,0.757319457952769
0.6269226811325752,0.5971702445993764,-0.32259913436121346,1.1862608175923104,0.5134103170901452
-1.0790396327515959,0.11762835679806263,0.29196586070968916,-0.5758586087361024,-0.737588034690646
-0.06246486538027801,-0.947527186608597,1.5159961327110016,-1.3887661361713153,0.5022507946011443
-0.3792000169768052,-0.07865959080916494,0.24944339903658846,0.176639799572606,0.44101693506862055
-0.21712574866107479,-1.5984410398546336,-0.9490315185475303,-0.5290665920021373,-0.657427700732612
-1.3412752024389336,-0.1588704664477717,1.3340003934953168,-0.037310825994790076,0.8886393288288023
0.3963938779336265,0.2517969689235645,-1.3859301875537866,0.9888098587116502,-1.9681663756294285
0.3728987082724922,-0.9656554353301827,1.1676448760185552,0.3790414035839362,0.5623448611811845
-0.8589963514208276,-0.46666340495005165,-0.6606681051420978,0.6657909626517804,-1.5819000069174836
-1.9828860599314757,-1.4729214014300789,1.2725572201067428,0.6404266496279026,0.21881569748105903
0.1921279870210558,1.064889046393339,1.379833773054822,1.208708585141052,0.9464628422453863
-1.1755401140298445,-0.36176533720266846,0.3076703982014972,-1.4365369615435275,0.42533242291647555
-0.45008691000310835,-0.4840095443179435,-0.29379257470249537,-0.5965669754139584,-0.43641074153627396
-1.4912889052845455,-0.23099730421138737,-0.7089065468183297,0.6208920543153733,0.32662108902698506
-0.8601061409832021,1.0167546931655083,0.31869382694245013,-0.9984671939820836,0.946544582101188
0.4605515193133595,-0.680526614412414,-0.3201276903343484,0.8419632810900757,0.06084596551909394
1.5274721399238458,0.5382256752774242,-1.297615435898815,-0.48519650458903313,1.3679245713872734
0.3512907229048073,2.926788449197571,-0.2069499819827466,0.22331417161738498,-1.0641383608761812
0.5642481804610359,-1.0256317651752174,-2.5178320929822093,1.0821993969870871,1.254937110273635
1.5167954754682704,2.8401545233754195,-0.010035761244892727,0.24213519750488618,-1.742421135822546
-0.418236314877728,-0.16941268434184975,0.9809829868307486,-1.0595421239517957,0.0949945502464721
0.8934810441050582,-1.5008800900280137,-0.4281669808270502,0.6172451270359786,-0.7251424454637191
-1.0556306856371194,-1.0804620260543116,-0.6295988521337043,-0.9669356417393221,0.7087490271839865
0.8530051340476359,0.8976396055226044,-0.321474019634006,-1.492158048739472,0.04812943296463474
-0.1257921394546371,-3.075433810099974,1.0478063030623381,-1.0828007772234516,0.10031630514621219
-0.9490697191393056,0.2202722508572701,0.12600765905384775,1.926970856102132,2.2353883587287764
1.3319693654881166,0.30851304136012764,-0.5168188184544743,-0.6197375005134956,-0.6174627638260188
0.026105271816155864,1.416124965353408,-0.034755667320403354,0.22810385409855807,0.2787075623353503
-1.0704388682587798,1.2079173218247652,-0.3615126211404286,0.8518041676641617,0.3123376323111012
-0.0658058364169593,-0.47568650729867007,0.3084713715497943,0.057368484165341384,0.21064962726440511
-0.6698763095690025,-0.7165747801489143,0.2173609819353865,-1.141143812640859,1.7964495619362966
-1.3507326036100111,0.19921407283473935,-0.49059518315176914,0.4493037426139116,-0.48157388482145036
1.218697785435301,0.4297507862298554,-0.5295548812150516,1.1398272620512941,0.11310642317146614
-2.186617903307786,-1.642536669832825,1.11354354526185,0.9596712994400799,-1.3005781623964843
-0.009654349154690052,-2.718239712882799,0.26886602063022486,-1.3488213790465764,0.3458083436137645
-1.1660214486563953,1.3003388773410491,0.616120896596592,0.09994403880068777,0.7438288897135932
1.4443334565643988,0.5978540973163867,-1.3742576177529167,-1.958233418010781,1.4712617981234315
0.5665833869935473,-1.9298624440307852,0.7916865395405686,1.1617020772409008,-0.465353594089345
-0.4427596978773968,0.027713708398485724,1.1392295489584072,-0.2312522078308209,1.1541148862537762
-1.5775535136183427,-0.11743413576849726,-0.8715461617238314,1.561789714334601,-0.4009542410252115
1.0983902190018795,0.6870915135779436,0.1104714104537591,1.4296706772650336,-1.447847458953623
-1.4567394809001883,1.053985815752608,0.3733475287330676,-0.13519043959591226,-0.5931736611215777
-1.357097468618616,-0.3686364999521857,1.8347863063815941,-0.7676567698440293,0.11833200741577583
1.2583998995438466,-1.2043025185110887,-0.7689018841247717,-0.18247377060889963,-0.3344786464043994
0.373048833638573,0.2207348930160379,1.5862885822832045,-0.03307571493403968,1.0977959508210509
1.4774461908883092,0.553596417192863,2.543261607341474,0.5870388429722985,-0.5343693602698772
0.6182022838592579,-0.4186351649496371,-1.4312609486077987,0.3323468117549843,0.5354280837156673
1.1012412820131192,-0.9259787747726272,-0.32322719229220603,-0.5546771037342944,-0.4479269283031704
0.6283165061647261,-0.9212426646365417,-0.19157515557043023,-0.48740704532193024,0.8037731347530246
0.22825854298389617,-0.33857752496860466,0.3864870031785445,-1.2738365214514562,0.7904369574007968
-1.596500914956888,0.023270049140085368,-0.12991279511298282,-0.5370663101745392,1.4664353590536376
-0.64797923496256,-0.8645933577820422,2.621654915009818,0.09928414465870297,-0.7710901788477725
1.482429613623183,0.3399307398286616,0.053816963902097815,-0.18760345714276272,-0.7768764254194744
-0.3994900100578278,-0.9080744856584537,1.1967809080884027,-2.2665958165909372,0.8180633604028716
0.43441504141899984,1.5489342937523136,-1.1498142510803149,-0.3258353975888538,-0.5431955010200838
-1.0324629260203921,0.1737132730545129,0.9020994703725191,1.5713969986573193,-0.33426903283086845
-0.03177781472959213,-0.6605002465022941,0.9089981043299639,0.47024274042126696,-0.30066710428141297
0.8797504958271186,1.632216707694181,0.4902942697889905,-0.25262432150702907,0.07001025286783666
0.2337755763308409,0.07841174558081153,-1.2283389391460127,-0.3813981739945359,0.7991005078618163
0.5557886612509679,0.4795917948236854,0.685066889066054,-2.2856471874057855,0.39627897806226947
2.14218786729813,-0.20068171186764378,2.113123614433999,-0.7689758370289296,-0.0683958968468841
-2.036267775298395,-0.845206819563718,0.26484825471763185,1.5873678731435914,-1.3889198726336243
0.35956874071550904,-0.13351646169092918,0.8167140905058725,1.345836897941641,-1.3344082976996137
-0.13410200654711849,-0.20147926546885678,-0.2639102937771063,-1.1652973886866436,0.576570626724196
0.3102186122539183,-0.6381245911314006,-1.1610296863569647,-1.2744078863755763,-0.03674415988584973
0.2792776381788069,-1.7321422883987796,-1.652065090360556,0.74096446489369,-1.1958805597361974
-0.045161097857814914,0.4839104199372341,-0.5860915565785394,-0.4226469574526577,-0.6174046117757699
0.0288464784624053,-1.0471071704544748,0.13963445548724088,-1.255430375133917,1.5795074100685536
-0.5471189266146596,-2.263615839242334,1.1120001162825068,0.5390731269726071,0.9043581594640088
-0.7862866436830702,-0.9379098262598069,0.9690585640285297,0.2425257665112924,0.9103176792684012
-0.10598537703950715,-0.12455145649806894,-1.495091614622104,0.21133516481541528,-0.8944777587866318
0.48846556934705226,0.6683159195229252,0.8898119637261839,0.45582632415493574,-0.3483382483247582
-0.20510887987926466,0.6027446928216614,-1.0213843696633549,-0.01904327810316147,-0.4691859675275433
3.1421616887711328,0.6471923040701729,1.4899630524216823,0.7062177973940162,-0.15266662706997153
0.4041376135768822,-0.20652567371778635,-0.9165051875340967,0.0132018947233619,1.1274755696414611
-0.3001148729607817,-1.3660818415568516,-0.6086035991412649,-2.0690481991157244,0.1359193688422261
-1.0452669841152251,1.1312062843334643,-1.400022041129453,-0.7774615836046633,0.2744037526748771
0.084650794109659,0.659704650795016,-0.2560130975906458,0.34570359592192773,0.8727859919494946
-0.9430336437299928,-0.15422014173359672,-1.2356087182656137,1.5318109482469815,0.22960330595862125
-0.08258506150525158,-0.6473515034008092,-0.9701295913077345,2.2005215956309563,-0.11030461173845676
0.3792720974234096,0.5881592679643749,-0.4315159805591915,-0.7261750666981555,-0.5864751980737754
0.3844411831623875,-0.8583001394947293,-0.10513233046561227,0.055227704944246156,-0.7165159191781342
-0.7966906647898782,0.06410534723543183,0.9222933116893831,0.6156190169623414,2.011039761853972
0.5268087366776261,-0.5502403688230508,-1.8683282854102021,0.7007800960109364,2.2947711418771943
-1.1000220759459252,-0.6746534060702036,0.3339473837728641,0.7772505657883375,-0.415032427916684
-0.03234028715141763,0.7347328274640289,0.21731448129428058,-1.308361560945888,0.1385393461238744
-1.1161008751525825,0.40436033431331286,-0.669240866049301,-0.5274216651337147,-0.3494581287458608
0.027191737939503365,-0.06519634177934086,-1.3718010652711499,0.4696138793759669,0.961015784393008
-0.769235779890184,-0.995351234383404,-0.3338445397824444,-2.1421653176584914,-0.5628886415768011
-0.24447324580590815,0.027498813502613625,-0.2432896171964397,-0.050458294150444005,-1.1062768356506951
0.6081660082074338,0.8372510671181858,-0.9832091779857741,-0.3369197478596237,-0.02305906549986051
0.2780311206700768,0.7845400143228781,-0.31210602147228245,-0.6047178883510439,-0.7803388639716292
-0.5341090592940692,-1.633693586970832,1.2458301669890988,0.005588011096048265,0.8587765170846113
-0.8807362002170045,-0.47582479132249467,-0.9109687136620859,-1.447894712665787,0.22567386277284507
-0.3692750727217163,1.6567374972201383,0.9135393679184617,1.0743004221268213,-0.5314048934780277
0.5208530618757825,0.6266003729278811,2.5844224628948416,3.5519161030971724,-0.7891333945040822
2.000129585456891,-0.5320994976484964,0.7299373965823793,-0.7171395366285271,-0.9377072442372066
0.524460043406395,2.6458400601958663,1.5451453339393868,0.05578052772738356,1.2105996205836522
0.5945414659713274,0.2549036725305333,1.2084379803967062,-0.35111377454241877,0.5503137543825829
-0.5421774093599127,2.443172059747115,-0.1370847981794718,1.4114016096077537,-0.47654856443683363
1.0514832103501142,-0.7140083494906972,0.6679517487518508,-0.5060320672733,0.10799855709597553
1.1841505034489623,-0.751455361811695,-0.3264329213886847,0.733894876986727,-0.9768317564402065
0.5028461092421652,-1.1870824429743316,-0.3733991786681471,0.7170720021518547,-1.2831820624765655
-0.8493238165043363,-1.2731514163948714,-0.537023865579193,-1.1395841456056164,-1.4091419284742972
0.28601438633393994,-0.311878478721125,-0.3678938479328114,1.1989282346895278,-0.45789349344219393
2.3009493219885804,-0.5880189461061827,0.342531129628969,-0.7193776634758232,1.7429515811645453
0.19562021694777798,-1.2344799663065886,-0.7365205238451121,0.7495923297108967,-1.767723123642722
0.3417635428035451,-1.9542677927068792,0.2401293495676758,-1.4443173772169577,0.7551914448043814
0.7600683324821068,0.3243326239461689,1.3888203673337678,1.6584702388866812,0.24284999138589686
-0.2943434760591998,0.1033570965600951,-0.8636144777629045,-0.6150856355392478,1.0963763481869058
1.7299234404897368,-0.30467181443677943,1.11032644500407,-0.5381275046580521,-0.05858697374922202
-0.8319969719665664,-0.30667251535705237,0.7589814914762215,0.38145015096389095,-0.1687217834581056
1.354442536520187,1.8385741705222043,0.42749305739230126,1.879802260996808,1.1351016656525494
-0.8535800633448097,-0.24982955835237095,0.23182380256483076,-0.14559495865041974,0.5783852273772488
-2.2480391999593987,-0.7401269272949856,-0.6812585629511871,1.2550977914680426,-0.6719693021243653
2.563765247796549,0.5241747551512976,0.056039142917480474,-2.2517629339455736,-0.9913890058179214
-0.13214997601302345,-0.3890593683697584,1.3369958582350054,1.3995100124385254,0.6242328727063723
1.1503804739383532,-0.331718299688597,-0.30508630818781546,-0.23433633394859468,-0.02331383694865224
-0.3748353830414485,0.42681956155532313,0.44943240321651606,1.9972043653947782,-2.1658969993291413
0.7718060920486685,-0.47523260376507664,0.4470973687126579,-0.381174949550114,-0.2270350534441872
-0.7393552573157376,-1.3873806559898962,0.1110067091180287,1.5815484654609926,-0.7079905526463194
-1.5582904761097345,-0.39144056484560635,-0.24622217664903895,-0.21143762214366324,2.6720865252385573
-0.42085716465168943,0.29589375736740137,-0.5906469728707249,-1.9939284675111806,-0.010805505467044616
-0.7753875015360356,0.45875678437660794,-1.694500300605348,-0.12055058443766518,-2.1510024802692627
-1.4264227151463316,1.4084186690052944,-0.052520077002992535,-0.6901485086381554,-0.05615419827141536
1.1076377846249184,2.1110582823827766,-0.8243337566800597,-0.8276052805637929,1.530900263406889
-2.279682385882449,0.9320443786434203,0.41322677979017264,-0.0956183588621099,-0.37130594254655896
-0.46672965978111797,0.6735368110743934,1.2922243615647964,-0.9021283304667591,2.9366840077876812
-1.2154006811668614,0.2609821788629029,-0.7884138773173731,1.0218329024415693,4.1386787533588345
0.16950338075171908,-0.870551114785349,0.3114548196983192,-0.7004985906143668,0.7548189260529294
-1.013150997301702,-1.3156687837937477,-0.7309638840107978,1.6269396861905159,0.350986391638276
-0.11933604955477611,0.6278075897485216,-2.1334668675890596,0.18989834702377179,-0.1811712997076543
0.459308778750969,-1.6687027588126178,-0.6572244421279042,0.2601868382271975,-0.7439998079390928
0.1003459465562323,1.1795705819970477,-1.4753692265676421,-0.020202104417920082,0.019566494383501908
0.09351665579913941,-1.4131623824968726,0.24115142813238857,-0.41236009983242156,0.8426415068894093
0.25503682662357297,-0.7522354052065701,-1.2903174228961838,-1.2213264045756633,-1.2006007714465905
0.5996631498138879,1.2991225808811238,-0.8223385573687297,-1.19519196994913,0.18844716862182828
-0.9109452731825953,-1.6476638009930449,-1.233539017250901,-1.4189000879869904,-1.0424252761230206
0.6808923187353922,0.1252704625673512,0.6417769463793935,-0.5027191880055814,-0.37913872590652636
-0.884703098561255,0.9022480676976978,-0.2994801806947326,0.48618069544820597,2.5744733191579736
-0.04732775974804121,0.8734846087329217,0.04226952027698635,-1.5426813806149258,1.3893013630133773
0.00259015243418718,1.835344793354383,0.7227899590683228,-0.6468656149351091,-0.6718345462614745
1.2724496974924633,0.4169758323262176,-1.2486501297724064,0.9269016227106669,-0.9676495430139942
0.9482785035069703,1.2456675863218127,0.45578123442925905,-0.1304320559179962,0.0407695603212004
0.24498281969364807,0.002210901806619417,-1.8351226540177512,0.7540971113431095,-0.4643877230170104
0.023618886804308587,0.11797992960184149,-0.7331609933684552,0.7683436591013466,-1.739436653565881
-0.7453460829886794,-0.8895218460893425,-0.28687615245797904,0.07215604915165534,-0.7440262559542968
0.9786913682161318,1.4957690766130791,0.25164395532918266,1.2815778937608535,-0.3032836836714553
0.8172264347278805,-1.901461134911712,-0.8833215162061655,0.5764734378087812,-0.27332511122717756

+ 6411
- 0
GRU/GRU_data/data_128/X_test.csv
File diff suppressed because it is too large
View File


+ 25639
- 0
GRU/GRU_data/data_128/X_train.csv
File diff suppressed because it is too large
View File


+ 6411
- 0
GRU/GRU_data/data_128/y_test.csv
File diff suppressed because it is too large
View File


+ 25639
- 0
GRU/GRU_data/data_128/y_train.csv
File diff suppressed because it is too large
View File


+ 6407
- 0
GRU/GRU_data/data_256/X_test.csv
File diff suppressed because it is too large
View File


+ 25623
- 0
GRU/GRU_data/data_256/X_train.csv
File diff suppressed because it is too large
View File


+ 6407
- 0
GRU/GRU_data/data_256/y_test.csv
File diff suppressed because it is too large
View File


+ 25623
- 0
GRU/GRU_data/data_256/y_train.csv
File diff suppressed because it is too large
View File


+ 6399
- 0
GRU/GRU_data/data_512/X_test.csv
File diff suppressed because it is too large
View File


+ 25591
- 0
GRU/GRU_data/data_512/X_train.csv
File diff suppressed because it is too large
View File


+ 6399
- 0
GRU/GRU_data/data_512/y_test.csv
File diff suppressed because it is too large
View File


+ 25591
- 0
GRU/GRU_data/data_512/y_train.csv
File diff suppressed because it is too large
View File


+ 6413
- 0
GRU/GRU_data/data_64/X_test.csv
File diff suppressed because it is too large
View File


+ 25647
- 0
GRU/GRU_data/data_64/X_train.csv
File diff suppressed because it is too large
View File


+ 6413
- 0
GRU/GRU_data/data_64/y_test.csv
File diff suppressed because it is too large
View File


+ 25647
- 0
GRU/GRU_data/data_64/y_train.csv
File diff suppressed because it is too large
View File


+ 80
- 0
GRU/GRU_data/generate_data.py View File

@ -0,0 +1,80 @@
import numpy as np
import pandas as pd
from sklearn.model_selection import train_test_split
import argparse
import os
def create_sequences_from_X_y(X_df, y_df, seq_length, forecast_steps):
    """
    Slice aligned input/target DataFrames into supervised sequences.

    Returns:
        X_seq: (num_sequences, seq_length, num_features)
        y_seq: (num_sequences, forecast_steps)
    """
    samples, targets = [], []
    num_windows = len(X_df) - seq_length - forecast_steps + 1
    for start in range(num_windows):
        stop = start + seq_length
        window = X_df.iloc[start:stop].values
        horizon = y_df.iloc[stop:stop + forecast_steps].values.flatten()
        # Defensive: only keep windows whose forecast horizon is complete.
        if len(horizon) == forecast_steps:
            samples.append(window)
            targets.append(horizon)
    return np.array(samples), np.array(targets)
def save_to_csv(data, filename):
    """Flatten `data` to 2D and write it to `filename` as CSV.

    3D arrays (X sequences shaped (samples, timesteps, features)) are
    reshaped to (samples, timesteps * features) with columns named
    attr_<feature>_t<timestep>; 2D arrays (y targets) are written as-is
    with columns named target_<i>.
    """
    if data.ndim == 3:  # X data: (samples, timesteps, features)
        reshaped = data.reshape(data.shape[0], -1)
        cols = [f"attr_{f}_t{t}" for t in range(data.shape[1]) for f in range(data.shape[2])]
    else:  # y data (2D)
        reshaped = data
        cols = [f"target_{i}" for i in range(reshaped.shape[1])]
    df = pd.DataFrame(reshaped, columns=cols)
    df.to_csv(filename, index=False)
    # Bug fix: the progress message did not name the file that was written.
    print(f"Saved {filename} with shape {data.shape}")
def main():
    """CLI entry point: read preprocessed X/y CSVs, window them into
    (sequence, forecast) pairs, split into train/test, and write the four
    output CSVs into --output_dir.
    """
    parser = argparse.ArgumentParser(description='Prepare real preprocessed X and y into sequences')
    parser.add_argument('--X_file', type=str, required=True, help='Path to input X CSV file')
    parser.add_argument('--y_file', type=str, required=True, help='Path to input y CSV file')
    parser.add_argument('--seq_length', type=int, default=8, help='Length of each time series sequence')
    parser.add_argument('--forecast_steps', type=int, default=2, help='Number of future steps to predict')
    parser.add_argument('--test_size', type=float, default=0.2, help='Fraction of data to use for testing')
    parser.add_argument('--output_dir', type=str, default='./data', help='Directory to save output files')
    args = parser.parse_args()
    # Create output directory (no error if it already exists)
    os.makedirs(args.output_dir, exist_ok=True)
    # Load CSVs
    X_df = pd.read_csv(args.X_file)
    y_df = pd.read_csv(args.y_file)
    # Generate overlapping (sequence, forecast) supervised pairs
    X, y = create_sequences_from_X_y(X_df, y_df, args.seq_length, args.forecast_steps)
    # Train/test split
    # NOTE(review): a shuffled split of overlapping windows leaks adjacent
    # timesteps between train and test — consider a chronological split.
    X_train, X_test, y_train, y_test = train_test_split(
        X, y, test_size=args.test_size, random_state=42
    )
    # Save to files
    save_to_csv(X_train, os.path.join(args.output_dir, 'X_train.csv'))
    save_to_csv(X_test, os.path.join(args.output_dir, 'X_test.csv'))
    save_to_csv(y_train, os.path.join(args.output_dir, 'y_train.csv'))
    save_to_csv(y_test, os.path.join(args.output_dir, 'y_test.csv'))
    print("\nData processing complete!")
    print(f"Train shapes: X={X_train.shape}, y={y_train.shape}")
    print(f"Test shapes: X={X_test.shape}, y={y_test.shape}")


if __name__ == "__main__":
    main()

+ 179
- 0
GRU/codes/WideGRU.py View File

@ -0,0 +1,179 @@
import tensorflow as tf
import numpy as np
import pandas as pd
from tensorflow.keras.layers import GRU, Flatten, Dense, Input
from tensorflow.keras.models import Model
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.callbacks import EarlyStopping, ModelCheckpoint
from tensorflow.keras.utils import plot_model
#import matplotlib.pyplot as plt
'''
Hyperparameter Tuning:
Adjust number of GRU layers (n)
Try different learning rates
Experiment with different GRU cell sizes (m)
'''
# Shared training callbacks: stop early after 10 epochs without validation
# improvement (restoring the best weights) and checkpoint the best model.
callbacks = [
    EarlyStopping(patience=10, restore_best_weights=True),
    ModelCheckpoint('best_model.h5', save_best_only=True)
]
#python generate_data.py -k 5
def load_data(data_dir='./data'):
    """Load the generated CSV splits and reshape X back to 3D.

    Returns:
        X_train, X_test: (samples, seq_length, num_attributes)
        y_train, y_test: (samples, forecast_steps), unchanged 2D arrays
    """
    X_train = pd.read_csv(f'{data_dir}/X_train.csv').values
    X_test = pd.read_csv(f'{data_dir}/X_test.csv').values
    y_train = pd.read_csv(f'{data_dir}/y_train.csv').values
    y_test = pd.read_csv(f'{data_dir}/y_test.csv').values
    # Reshape X data back to 3D (samples, timesteps, features)
    seq_length = 8  # must match the value used in generate_data.py
    # Bug fix: the feature count is encoded in X's width (seq_length * features).
    # The old code used y_train.shape[1], which is the number of forecast
    # steps and only coincidentally matches the feature count; when they
    # differ, the reshape fails or silently mangles the sequences.
    num_attributes = X_train.shape[1] // seq_length
    X_train = X_train.reshape(-1, seq_length, num_attributes)
    X_test = X_test.reshape(-1, seq_length, num_attributes)
    return X_train, X_test, y_train, y_test
def train_model(k=8):
    """Train the GRU model on generated data and plot the loss curves.

    Args:
        k: GRU cell count per layer (model width).

    Returns:
        (model, history): the trained GRUModel wrapper and Keras History.
    """
    # Bug fix: the top-of-file matplotlib import is commented out, so `plt`
    # was an unresolved name when plotting below; import it locally instead.
    import matplotlib.pyplot as plt
    # 1. Load data
    X_train, X_test, y_train, y_test = load_data()
    # 2. Initialize model (using k as the GRU size)
    model = GRUModel(n=2, m=k, timesteps=8)  # 2 GRU layers with k cells each
    # 3. Compile model
    model.compile(
        optimizer=Adam(learning_rate=0.001),
        loss='mse',
        metrics=['mae']
    )
    model.summary()
    # 4. Train model (early stopping / checkpointing via module-level callbacks)
    history = model.fit(
        X_train, y_train,
        validation_data=(X_test, y_test),
        epochs=50,
        batch_size=32,
        verbose=1,
        callbacks=callbacks
    )
    # 5. Evaluate on the held-out split
    test_loss, test_mae = model.model.evaluate(X_test, y_test, verbose=0)
    print(f"\nTest Loss: {test_loss:.4f}, Test MAE: {test_mae:.4f}")
    # 6. Plot training history
    plt.figure(figsize=(10, 5))
    plt.plot(history.history['loss'], label='Train Loss')
    plt.plot(history.history['val_loss'], label='Validation Loss')
    plt.title('Training History')
    plt.xlabel('Epoch')
    plt.ylabel('Loss')
    plt.legend()
    plt.grid(True)
    plt.savefig('training_history.png')
    plt.show()
    return model, history
class GRUModel:
    """Stacked-GRU regressor over (timesteps, k) inputs.

    Architecture: n GRU layers of m cells, then Dense(m^2) -> Dense(m)
    -> Dense(l) output head.
    """

    def __init__(self, n=2, m=16, k=8, l=2, timesteps=8):
        """
        GRU Model with:
        - n GRU layers (n >= 1)
        - m cells per GRU layer (m >= 5)
        - k attributes per timestep, organized in `timesteps` steps
        - Flattened to m^2 dense layer, then m dense layer
        - l output values

        Args:
            n: Number of GRU layers (>= 1)
            m: Number of cells per GRU layer (>= 5)
            k: Number of input attributes (features per timestep)
            l: Number of output values
            timesteps: Sequence length of each input sample
        """
        assert n >= 1, "Number of GRU layers (n) must be >= 1"
        assert m >= 5, "Number of cells per layer (m) must be >= 5"
        self.n = n
        self.m = m
        self.k = k
        self.l = l
        self.timesteps = timesteps
        self.model = self._build_model()

    def _build_model(self):
        """Build the model using the functional API."""
        # Input layer: (timesteps, features)
        inputs = Input(shape=(self.timesteps, self.k))
        x = inputs
        # Bug fix: the original looped range(self.n), which together with the
        # final non-sequence GRU below produced n+1 GRU layers instead of the
        # documented n. The sibling WideGRU_* scripts use n-1 here as well.
        for _ in range(self.n - 1):
            x = GRU(self.m, return_sequences=True)(x)
        # Last GRU layer collapses the sequence dimension
        x = GRU(self.m)(x)
        # Dense head
        x = Dense(self.m**2, activation='relu')(x)
        x = Dense(self.m, activation='relu')(x)
        # Output layer
        outputs = Dense(self.l)(x)
        return Model(inputs=inputs, outputs=outputs)

    def plot(self):
        """Render the architecture diagram to gru_architecture.jpeg."""
        print(f'Ploting model')
        plot_model(self.model, to_file=f'gru_architecture.jpeg', show_shapes=True, show_layer_names=True)

    def summary(self):
        """Print model summary"""
        return self.model.summary()

    def compile(self, **kwargs):
        """Compile the underlying Keras model."""
        self.model.compile(**kwargs)

    def fit(self, *args, **kwargs):
        """Fit the underlying Keras model."""
        return self.model.fit(*args, **kwargs)

    def predict(self, *args, **kwargs):
        """Make predictions with the underlying Keras model."""
        return self.model.predict(*args, **kwargs)
# Example usage
if __name__ == "__main__":
    # Instantiate the model
    model = GRUModel(n=2, m=16, k=8, l=2, timesteps=8)
    # Compile the model
    model.compile(optimizer='adam', loss='mse')
    # Plot and save the architecture diagram
    model.plot()
    # Bug fix: the message previously named gru_model_architecture.png, a file
    # that plot() never writes; it actually saves gru_architecture.jpeg.
    print("Model architecture saved as gru_architecture.jpeg")
    # Example: train the model on generated data instead:
    #trained_model, training_history = train_model(k=8)

+ 142
- 0
GRU/codes/WideGRU_128.py View File

@ -0,0 +1,142 @@
import tensorflow as tf
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import math
from tensorflow.keras.layers import GRU, Dense, Input
from tensorflow.keras.models import Model
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.callbacks import EarlyStopping, ModelCheckpoint
from tensorflow.keras.utils import plot_model
from tensorflow.keras import backend as K
# Custom RMSE loss function
def rmse(y_true, y_pred):
    """Root-mean-squared error between targets and predictions (Keras loss)."""
    err = y_true - y_pred
    return K.sqrt(K.mean(K.square(err)))
# Callbacks for training
# Training callbacks: early stopping (10 stale epochs, best weights restored)
# plus on-disk checkpointing of the best model seen so far.
callbacks = [
    EarlyStopping(patience=10, restore_best_weights=True),
    ModelCheckpoint('best_model.h5', save_best_only=True)
]
# Load data function (assumes 2D X and standardized y)
def load_data(data_dir='./data_128'):
    """Read the train/test CSV splits and the y-standardization statistics.

    Returns:
        X_train, X_test, y_train, y_test (2D arrays) plus the per-column
        mean and std of y_train for inverse-transforming predictions.
    """
    def _read(name):
        return pd.read_csv(f'{data_dir}/{name}.csv').values

    X_train = _read('X_train')
    X_test = _read('X_test')
    y_train = _read('y_train')
    y_test = _read('y_test')
    # Training-target statistics, used later to map standardized
    # predictions back to the original scale.
    mean_y = y_train.mean(axis=0)
    std_y = y_train.std(axis=0)
    return X_train, X_test, y_train, y_test, mean_y, std_y
# AGRU model class
class AGRU:
    """Wide GRU regressor whose output width is derived from the timestep
    count (output = timesteps / 32).
    """

    @classmethod
    def log(cls, x):
        # output width = timesteps / 32, computed via log2 arithmetic
        return int(2 ** (math.log2(x) - 5))

    def __init__(self, timesteps, cells, attributes=8, layers=2):
        assert layers >= 2, "Number of GRU layers must be >= 2"
        assert cells >= 5, "Number of cells must be >= 5"
        assert timesteps in [64, 128, 256, 512], f"Timestep {timesteps} is not allowed"
        self.timesteps = timesteps
        self.n = layers
        self.m = cells
        self.k = attributes
        self.l = AGRU.log(timesteps)
        self.model = self._build_model()

    def _build_model(self):
        # Input arrives flattened: (timesteps // attributes) rows of
        # `attributes` features each.
        net_in = Input(shape=(self.timesteps // self.k, self.k))
        hidden = net_in
        for _ in range(self.n - 1):
            hidden = GRU(self.m, return_sequences=True)(hidden)
        hidden = GRU(self.m)(hidden)
        if self.m <= 128:
            hidden = Dense(self.m, activation='relu')(hidden)
        else:
            # Funnel wide GRU outputs through halving Dense layers until
            # the width drops below 128.
            width = self.m
            while width >= 128:
                hidden = Dense(width, activation='relu')(hidden)
                width = width // 2
        net_out = Dense(self.l)(hidden)
        return Model(inputs=net_in, outputs=net_out)

    def plot(self):
        print(f'Ploting model')
        plot_model(self.model, to_file=f'gru_architecture_128.jpeg', show_shapes=True, show_layer_names=True)

    def summary(self):
        self.model.summary()

    def compile(self):
        self.model.compile(optimizer=Adam(learning_rate=0.001), loss=rmse, metrics=['mae', 'mse'])

    def fit(self, *args, **kwargs):
        return self.model.fit(*args, **kwargs)

    def predict(self, *args, **kwargs):
        return self.model.predict(*args, **kwargs)
# Training function
def train_model(timesteps=128, cells=32, attributes=8):
    """Train an AGRU on the 128-timestep dataset and plot the loss curves.

    Args:
        timesteps: Flattened input width per sample (64/128/256/512).
        cells: GRU cells per layer.
        attributes: Features per timestep row after reshaping.

    Returns:
        (model, history): the trained AGRU wrapper and Keras History.
    """
    X_train, X_test, y_train, y_test, mean_y, std_y = load_data()
    # Reshape X into 3D for GRU input: (samples, timesteps // attributes, attributes)
    X_train = X_train.reshape(-1, timesteps // attributes, attributes)
    X_test = X_test.reshape(-1, timesteps // attributes, attributes)
    model = AGRU(timesteps=timesteps, cells=cells, attributes=attributes)
    model.compile()
    model.summary()
    history = model.fit(
        X_train, y_train,
        validation_data=(X_test, y_test),
        epochs=50,
        batch_size=16,
        verbose=1,
        callbacks=callbacks
    )
    # Evaluate on the held-out split (loss is the custom RMSE)
    test_loss, test_mae, test_mse = model.model.evaluate(X_test, y_test, verbose=0)
    print(f"\nTest Loss (RMSE): {test_loss:.4f}, MAE: {test_mae:.4f}, MSE: {test_mse:.4f}")
    # Predictions and inverse transform back to the original y scale
    y_pred_standardized = model.predict(X_test)
    y_pred = y_pred_standardized * std_y + mean_y
    y_true = y_test * std_y + mean_y
    # Spot-check a few predictions against the de-standardized targets
    for i in range(5):
        print(f"Pred: {y_pred[i]}, Actual: {y_true[i]}")
    # Plot training history
    plt.figure(figsize=(10, 5))
    plt.plot(history.history['loss'], label='Train Loss')
    plt.plot(history.history['val_loss'], label='Validation Loss')
    plt.title('Training History')
    plt.xlabel('Epoch')
    plt.ylabel('Loss')
    plt.legend()
    plt.grid(True)
    plt.savefig('training_history_128.png')
    plt.show()
    return model, history
if __name__ == "__main__":
    # Train on the 128-timestep dataset, then save the architecture diagram.
    model, history = train_model(timesteps=128, cells=32, attributes=8)
    model.plot()
    #agru =AGRU(timesteps=128, cells=32, attributes=8)
    #agru.plot()

+ 139
- 0
GRU/codes/WideGRU_128_cloud.py View File

@ -0,0 +1,139 @@
import tensorflow as tf
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import math
from tensorflow.keras.layers import GRU, Dense, Input
from tensorflow.keras.models import Model
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.callbacks import EarlyStopping, ModelCheckpoint
from tensorflow.keras.utils import plot_model
from tensorflow.keras import backend as K
# Custom RMSE loss function
def rmse(y_true, y_pred):
    """Root-mean-squared error; used as the Keras training loss."""
    return K.sqrt(K.mean(K.square(y_pred - y_true)))
# Callbacks for training
# Training callbacks: aggressive early stopping (patience=3, suited to the
# shorter cloud runs) plus checkpointing of the best model.
callbacks = [
    EarlyStopping(patience=3, restore_best_weights=True),
    ModelCheckpoint('best_model.h5', save_best_only=True)
]
# Load data function (assumes 2D X and standardized y)
def load_data(data_dir='./data_128'):
    """Load the CSV splits plus y_train's mean/std for inverse-standardizing.

    Returns:
        X_train, X_test, y_train, y_test, mean_y, std_y
    """
    frames = {
        name: pd.read_csv(f'{data_dir}/{name}.csv').values
        for name in ('X_train', 'X_test', 'y_train', 'y_test')
    }
    # Training-target statistics for mapping predictions back to real units.
    mean_y = frames['y_train'].mean(axis=0)
    std_y = frames['y_train'].std(axis=0)
    return (frames['X_train'], frames['X_test'],
            frames['y_train'], frames['y_test'], mean_y, std_y)
# AGRU model class
class AGRU:
    """Wide GRU regressor, cloud variant.

    Output width l is derived from the timestep count (timesteps / 32).
    Differs from the local WideGRU_128 variant in its defaults (4 layers),
    Dense-funnel threshold (64 vs 128), and learning rate (7e-4 vs 1e-3).
    """

    def plot(self):
        """Save the architecture diagram to gru_architecture_128c.jpeg."""
        print(f'Ploting model')
        plot_model(self.model, to_file=f'gru_architecture_128c.jpeg', show_shapes=True, show_layer_names=True)

    @classmethod
    def log(cls, x):
        # output width = timesteps / 32, computed as 2^(log2(x) - 5)
        return int(2 ** (math.log2(x) - 5))

    def __init__(self, timesteps, cells, attributes=8, layers=4):
        """
        Args:
            timesteps: Flattened input width; must be one of 64/128/256/512.
            cells: GRU cells per layer (>= 5).
            attributes: Features per timestep row.
            layers: Total number of GRU layers (>= 2; default 4 for cloud).
        """
        assert layers >= 2, "Number of GRU layers must be >= 2"
        assert cells >= 5, "Number of cells must be >= 5"
        assert timesteps in [64, 128, 256, 512], f"Timestep {timesteps} is not allowed"
        self.timesteps = timesteps
        self.n = layers
        self.m = cells
        self.k = attributes
        self.l = AGRU.log(timesteps)
        self.model = self._build_model()

    def _build_model(self):
        # Accept input in 2D shape: (timesteps // attributes, attributes)
        inputs = Input(shape=(self.timesteps // self.k, self.k))
        x = inputs  # Already in the right shape
        for _ in range(self.n - 1):
            x = GRU(self.m, return_sequences=True)(x)
        # Final GRU collapses the sequence dimension
        x = GRU(self.m)(x)
        # Dense head: single layer when narrow, otherwise halve the width
        # repeatedly until it drops below 64.
        if self.m <= 64:
            x = Dense(self.m, activation='relu')(x)
        else:
            sk = self.m
            while sk >= 64:
                x = Dense(sk, activation='relu')(x)
                sk = sk // 2
        outputs = Dense(self.l)(x)
        model = Model(inputs=inputs, outputs=outputs)
        return model

    def summary(self):
        self.model.summary()

    def compile(self):
        # Lower LR (7e-4) than the local variant; custom RMSE loss.
        self.model.compile(optimizer=Adam(learning_rate=0.0007), loss=rmse, metrics=['mae', 'mse'])

    def fit(self, *args, **kwargs):
        return self.model.fit(*args, **kwargs)

    def predict(self, *args, **kwargs):
        return self.model.predict(*args, **kwargs)
# Training function
def train_model(timesteps=128, cells=1280, attributes=8):
    """Train the cloud AGRU (1280 cells by default) on the 128-timestep data.

    Args:
        timesteps: Flattened input width per sample (64/128/256/512).
        cells: GRU cells per layer.
        attributes: Features per timestep row after reshaping.

    Returns:
        (model, history): the trained AGRU wrapper and Keras History.
    """
    X_train, X_test, y_train, y_test, mean_y, std_y = load_data()
    # Reshape X into 3D for GRU input: (samples, timesteps // attributes, attributes)
    X_train = X_train.reshape(-1, timesteps // attributes, attributes)
    X_test = X_test.reshape(-1, timesteps // attributes, attributes)
    model = AGRU(timesteps=timesteps, cells=cells, attributes=attributes)
    model.compile()
    model.summary()
    history = model.fit(
        X_train, y_train,
        validation_data=(X_test, y_test),
        epochs=30,
        batch_size=32,
        verbose=1,
        callbacks=callbacks
    )
    # Evaluate on the held-out split (loss is the custom RMSE)
    test_loss, test_mae, test_mse = model.model.evaluate(X_test, y_test, verbose=0)
    print(f"\nTest Loss (RMSE): {test_loss:.4f}, MAE: {test_mae:.4f}, MSE: {test_mse:.4f}")
    # Predictions and inverse transform back to the original y scale
    y_pred_standardized = model.predict(X_test)
    y_pred = y_pred_standardized * std_y + mean_y
    y_true = y_test * std_y + mean_y
    # Spot-check a few predictions against the de-standardized targets
    for i in range(5):
        print(f"Pred: {y_pred[i]}, Actual: {y_true[i]}")
    # Plot training history
    plt.figure(figsize=(10, 5))
    plt.plot(history.history['loss'], label='Train Loss')
    plt.plot(history.history['val_loss'], label='Validation Loss')
    plt.title('Training History')
    plt.xlabel('Epoch')
    plt.ylabel('Loss')
    plt.legend()
    plt.grid(True)
    plt.savefig('training_history_128c.png')
    plt.show()
    return model, history
if __name__ == "__main__":
    # Cloud run: 1280 GRU cells per layer on the 128-timestep dataset.
    model, history = train_model(timesteps=128, cells=1280, attributes=8)
    model.plot()

+ 142
- 0
GRU/codes/WideGRU_256.py View File

@ -0,0 +1,142 @@
import tensorflow as tf
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import math
from tensorflow.keras.layers import GRU, Dense, Input
from tensorflow.keras.models import Model
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.callbacks import EarlyStopping, ModelCheckpoint
from tensorflow.keras.utils import plot_model
from tensorflow.keras import backend as K
# Custom RMSE loss function
def rmse(y_true, y_pred):
    """Root-mean-square error, used both as the loss and as a metric."""
    err = y_pred - y_true
    return K.sqrt(K.mean(K.square(err)))
# Callbacks for training: stop once val loss stalls, keep the best weights on disk.
early_stop = EarlyStopping(patience=10, restore_best_weights=True)
checkpoint = ModelCheckpoint('best_model.h5', save_best_only=True)
callbacks = [early_stop, checkpoint]
# Load data function (assumes 2D X and standardized y)
def load_data(data_dir='./data_256'):
    """Read the train/test CSV splits from *data_dir*.

    Returns (X_train, X_test, y_train, y_test, mean_y, std_y); the per-column
    mean/std of y_train are returned so predictions can be de-standardized.
    """
    def _read(name):
        return pd.read_csv(f'{data_dir}/{name}.csv').values
    X_train = _read('X_train')
    X_test = _read('X_test')
    y_train = _read('y_train')
    y_test = _read('y_test')
    # Training-target statistics for the inverse transform.
    mean_y = y_train.mean(axis=0)
    std_y = y_train.std(axis=0)
    return X_train, X_test, y_train, y_test, mean_y, std_y
# AGRU model class
class AGRU:
    """Stacked GRU regressor over windows of (timesteps // attributes) steps.

    `layers` GRUs of `cells` units feed a halving dense "funnel" and a
    linear output of timesteps / 32 values.
    """

    def plot(self):
        """Save an architecture diagram of the underlying Keras model."""
        print('Plotting model')  # fixed "Ploting" typo; no placeholders needed
        plot_model(self.model, to_file='gru_architecture_256.jpeg', show_shapes=True, show_layer_names=True)

    @classmethod
    def log(cls, x):
        # Output-layer width: 2**(log2(x) - 5) == x / 32.
        return int(2 ** (math.log2(x) - 5))  # output = timesteps / 32

    def __init__(self, timesteps, cells, attributes=8, layers=2):
        # NOTE(review): asserts are stripped under -O; kept so callers that
        # catch AssertionError keep working.
        assert layers >= 2, "Number of GRU layers must be >= 2"
        assert cells >= 5, "Number of cells must be >= 5"
        assert timesteps in [64, 128, 256, 512], f"Timestep {timesteps} is not allowed"
        self.timesteps = timesteps
        self.n = layers      # number of GRU layers
        self.m = cells       # GRU units per layer
        self.k = attributes  # features per timestep
        self.l = AGRU.log(timesteps)  # output width (timesteps / 32)
        self.model = self._build_model()

    def _build_model(self):
        """Assemble the graph: GRU stack -> dense funnel -> linear head."""
        # Input window shaped (timesteps // attributes, attributes).
        inputs = Input(shape=(self.timesteps // self.k, self.k))
        x = inputs
        # All but the last GRU return sequences so layers stack.
        for _ in range(self.n - 1):
            x = GRU(self.m, return_sequences=True)(x)
        x = GRU(self.m)(x)
        if self.m <= 128:
            x = Dense(self.m, activation='relu')(x)
        else:
            # Halve the dense width until it drops below 128.
            sk = self.m
            while sk >= 128:
                x = Dense(sk, activation='relu')(x)
                sk = sk // 2
        outputs = Dense(self.l)(x)
        return Model(inputs=inputs, outputs=outputs)

    def summary(self):
        """Print the Keras model summary."""
        self.model.summary()

    def compile(self):
        """Compile with Adam (lr=0.001), the custom RMSE loss, MAE/MSE metrics."""
        self.model.compile(optimizer=Adam(learning_rate=0.001), loss=rmse, metrics=['mae', 'mse'])

    def fit(self, *args, **kwargs):
        # Thin delegation to Keras Model.fit.
        return self.model.fit(*args, **kwargs)

    def predict(self, *args, **kwargs):
        # Thin delegation to Keras Model.predict.
        return self.model.predict(*args, **kwargs)
# Training function
def train_model(timesteps=256, cells=64, attributes=8):
    """Train an AGRU on the pre-generated splits and plot the loss curves.

    Returns (model, history). Side effects: prints metrics and sample
    predictions, saves 'training_history_256.png'.
    """
    # Data directory follows the timesteps parameter (was hard-coded default).
    X_train, X_test, y_train, y_test, mean_y, std_y = load_data(f'./data_{timesteps}')
    # Reshape X into 3D for GRU input: (samples, timesteps // attributes, attributes)
    X_train = X_train.reshape(-1, timesteps // attributes, attributes)
    X_test = X_test.reshape(-1, timesteps // attributes, attributes)
    model = AGRU(timesteps=timesteps, cells=cells, attributes=attributes)
    model.compile()
    model.summary()
    history = model.fit(
        X_train, y_train,
        validation_data=(X_test, y_test),
        epochs=50,
        batch_size=32,
        verbose=1,
        callbacks=callbacks
    )
    test_loss, test_mae, test_mse = model.model.evaluate(X_test, y_test, verbose=0)
    print(f"\nTest Loss (RMSE): {test_loss:.4f}, MAE: {test_mae:.4f}, MSE: {test_mse:.4f}")
    # Map standardized predictions back to the original target scale.
    y_pred_standardized = model.predict(X_test)
    y_pred = y_pred_standardized * std_y + mean_y
    y_true = y_test * std_y + mean_y
    # Guard against test sets smaller than 5 samples (was an IndexError risk).
    for i in range(min(5, len(y_pred))):
        print(f"Pred: {y_pred[i]}, Actual: {y_true[i]}")
    # Plot training history
    plt.figure(figsize=(10, 5))
    plt.plot(history.history['loss'], label='Train Loss')
    plt.plot(history.history['val_loss'], label='Validation Loss')
    plt.title('Training History')
    plt.xlabel('Epoch')
    plt.ylabel('Loss')
    plt.legend()
    plt.grid(True)
    plt.savefig('training_history_256.png')
    plt.show()
    return model, history
if __name__ == "__main__":
    # Only draws the architecture diagram; the training run is commented out.
    #model, history = train_model(timesteps=256, cells=64, attributes=8)
    #model.plot()
    agru =AGRU(timesteps=256, cells=64, attributes=8)
    agru.plot()

+ 139
- 0
GRU/codes/WideGRU_256_cloud.py View File

@ -0,0 +1,139 @@
import tensorflow as tf
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import math
from tensorflow.keras.layers import GRU, Dense, Input
from tensorflow.keras.models import Model
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.callbacks import EarlyStopping, ModelCheckpoint
from tensorflow.keras.utils import plot_model
from tensorflow.keras import backend as K
# Custom RMSE loss function
def rmse(y_true, y_pred):
    """Root-mean-square error, used both as the loss and as a metric."""
    err = y_pred - y_true
    return K.sqrt(K.mean(K.square(err)))
# Callbacks for training: stop once val loss stalls, keep the best weights on disk.
early_stop = EarlyStopping(patience=3, restore_best_weights=True)
checkpoint = ModelCheckpoint('best_model.h5', save_best_only=True)
callbacks = [early_stop, checkpoint]
# Load data function (assumes 2D X and standardized y)
def load_data(data_dir='./data_256'):
    """Read the train/test CSV splits from *data_dir*.

    Returns (X_train, X_test, y_train, y_test, mean_y, std_y); the per-column
    mean/std of y_train are returned so predictions can be de-standardized.
    """
    def _read(name):
        return pd.read_csv(f'{data_dir}/{name}.csv').values
    X_train = _read('X_train')
    X_test = _read('X_test')
    y_train = _read('y_train')
    y_test = _read('y_test')
    # Training-target statistics for the inverse transform.
    mean_y = y_train.mean(axis=0)
    std_y = y_train.std(axis=0)
    return X_train, X_test, y_train, y_test, mean_y, std_y
# AGRU model class
class AGRU:
    """Stacked GRU regressor over windows of (timesteps // attributes) steps.

    Cloud variant: 4 GRU layers by default, dense funnel down to 64 units.
    """

    def plot(self):
        """Save an architecture diagram of the underlying Keras model."""
        print('Plotting model')  # fixed "Ploting" typo; no placeholders needed
        plot_model(self.model, to_file='gru_architecture_256c.jpeg', show_shapes=True, show_layer_names=True)

    @classmethod
    def log(cls, x):
        # Output-layer width: 2**(log2(x) - 5) == x / 32.
        return int(2 ** (math.log2(x) - 5))  # output = timesteps / 32

    def __init__(self, timesteps, cells, attributes=8, layers=4):
        # NOTE(review): asserts are stripped under -O; kept so callers that
        # catch AssertionError keep working.
        assert layers >= 2, "Number of GRU layers must be >= 2"
        assert cells >= 5, "Number of cells must be >= 5"
        assert timesteps in [64, 128, 256, 512], f"Timestep {timesteps} is not allowed"
        self.timesteps = timesteps
        self.n = layers      # number of GRU layers
        self.m = cells       # GRU units per layer
        self.k = attributes  # features per timestep
        self.l = AGRU.log(timesteps)  # output width (timesteps / 32)
        self.model = self._build_model()

    def _build_model(self):
        """Assemble the graph: GRU stack -> dense funnel -> linear head."""
        # Input window shaped (timesteps // attributes, attributes).
        inputs = Input(shape=(self.timesteps // self.k, self.k))
        x = inputs
        # All but the last GRU return sequences so layers stack.
        for _ in range(self.n - 1):
            x = GRU(self.m, return_sequences=True)(x)
        x = GRU(self.m)(x)
        if self.m <= 64:
            x = Dense(self.m, activation='relu')(x)
        else:
            # Halve the dense width until it drops below 64.
            sk = self.m
            while sk >= 64:
                x = Dense(sk, activation='relu')(x)
                sk = sk // 2
        outputs = Dense(self.l)(x)
        return Model(inputs=inputs, outputs=outputs)

    def summary(self):
        """Print the Keras model summary."""
        self.model.summary()

    def compile(self):
        """Compile with Adam (lr=0.0005), the custom RMSE loss, MAE/MSE metrics."""
        self.model.compile(optimizer=Adam(learning_rate=0.0005), loss=rmse, metrics=['mae', 'mse'])

    def fit(self, *args, **kwargs):
        # Thin delegation to Keras Model.fit.
        return self.model.fit(*args, **kwargs)

    def predict(self, *args, **kwargs):
        # Thin delegation to Keras Model.predict.
        return self.model.predict(*args, **kwargs)
# Training function
def train_model(timesteps=256, cells=1280, attributes=8):
    """Train an AGRU on the pre-generated splits and plot the loss curves.

    Returns (model, history). Side effects: prints metrics and sample
    predictions, saves 'training_history_256c.png'.
    """
    # Data directory follows the timesteps parameter (was hard-coded default).
    X_train, X_test, y_train, y_test, mean_y, std_y = load_data(f'./data_{timesteps}')
    # Reshape X into 3D for GRU input: (samples, timesteps // attributes, attributes)
    X_train = X_train.reshape(-1, timesteps // attributes, attributes)
    X_test = X_test.reshape(-1, timesteps // attributes, attributes)
    model = AGRU(timesteps=timesteps, cells=cells, attributes=attributes)
    model.compile()
    model.summary()
    history = model.fit(
        X_train, y_train,
        validation_data=(X_test, y_test),
        epochs=35,
        batch_size=32,
        verbose=1,
        callbacks=callbacks
    )
    test_loss, test_mae, test_mse = model.model.evaluate(X_test, y_test, verbose=0)
    print(f"\nTest Loss (RMSE): {test_loss:.4f}, MAE: {test_mae:.4f}, MSE: {test_mse:.4f}")
    # Map standardized predictions back to the original target scale.
    y_pred_standardized = model.predict(X_test)
    y_pred = y_pred_standardized * std_y + mean_y
    y_true = y_test * std_y + mean_y
    # Guard against test sets smaller than 5 samples (was an IndexError risk).
    for i in range(min(5, len(y_pred))):
        print(f"Pred: {y_pred[i]}, Actual: {y_true[i]}")
    # Plot training history
    plt.figure(figsize=(10, 5))
    plt.plot(history.history['loss'], label='Train Loss')
    plt.plot(history.history['val_loss'], label='Validation Loss')
    plt.title('Training History')
    plt.xlabel('Epoch')
    plt.ylabel('Loss')
    plt.legend()
    plt.grid(True)
    plt.savefig('training_history_256c.png')
    plt.show()
    return model, history
if __name__ == "__main__":
    # Entry point: train the 1280-cell cloud configuration on 256-value windows.
    model, history = train_model(timesteps=256, cells=1280, attributes=8)
    model.plot()

+ 142
- 0
GRU/codes/WideGRU_512.py View File

@ -0,0 +1,142 @@
import tensorflow as tf
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import math
from tensorflow.keras.layers import GRU, Dense, Input
from tensorflow.keras.models import Model
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.callbacks import EarlyStopping, ModelCheckpoint
from tensorflow.keras.utils import plot_model
from tensorflow.keras import backend as K
# Custom RMSE loss function
def rmse(y_true, y_pred):
    """Root-mean-square error, used both as the loss and as a metric."""
    err = y_pred - y_true
    return K.sqrt(K.mean(K.square(err)))
# Callbacks for training: stop once val loss stalls, keep the best weights on disk.
early_stop = EarlyStopping(patience=5, restore_best_weights=True)
checkpoint = ModelCheckpoint('best_model.h5', save_best_only=True)
callbacks = [early_stop, checkpoint]
# Load data function (assumes 2D X and standardized y)
def load_data(data_dir='./data_512'):
    """Read the train/test CSV splits from *data_dir*.

    Returns (X_train, X_test, y_train, y_test, mean_y, std_y); the per-column
    mean/std of y_train are returned so predictions can be de-standardized.
    """
    def _read(name):
        return pd.read_csv(f'{data_dir}/{name}.csv').values
    X_train = _read('X_train')
    X_test = _read('X_test')
    y_train = _read('y_train')
    y_test = _read('y_test')
    # Training-target statistics for the inverse transform.
    mean_y = y_train.mean(axis=0)
    std_y = y_train.std(axis=0)
    return X_train, X_test, y_train, y_test, mean_y, std_y
# AGRU model class
class AGRU:
    """Stacked GRU regressor over windows of (timesteps // attributes) steps.

    512-timestep variant: 2 GRU layers by default, dense funnel down to 64.
    """

    def plot(self):
        """Save an architecture diagram of the underlying Keras model."""
        print('Plotting model')  # fixed "Ploting" typo; no placeholders needed
        plot_model(self.model, to_file='gru_architecture_512.jpeg', show_shapes=True, show_layer_names=True)

    @classmethod
    def log(cls, x):
        # Output-layer width: 2**(log2(x) - 5) == x / 32.
        return int(2 ** (math.log2(x) - 5))  # output = timesteps / 32

    def __init__(self, timesteps, cells, attributes=8, layers=2):
        # NOTE(review): asserts are stripped under -O; kept so callers that
        # catch AssertionError keep working.
        assert layers >= 2, "Number of GRU layers must be >= 2"
        assert cells >= 5, "Number of cells must be >= 5"
        assert timesteps in [64, 128, 256, 512], f"Timestep {timesteps} is not allowed"
        self.timesteps = timesteps
        self.n = layers      # number of GRU layers
        self.m = cells       # GRU units per layer
        self.k = attributes  # features per timestep
        self.l = AGRU.log(timesteps)  # output width (timesteps / 32)
        self.model = self._build_model()

    def _build_model(self):
        """Assemble the graph: GRU stack -> dense funnel -> linear head."""
        # Input window shaped (timesteps // attributes, attributes).
        inputs = Input(shape=(self.timesteps // self.k, self.k))
        x = inputs
        # All but the last GRU return sequences so layers stack.
        for _ in range(self.n - 1):
            x = GRU(self.m, return_sequences=True)(x)
        x = GRU(self.m)(x)
        if self.m <= 64:
            x = Dense(self.m, activation='relu')(x)
        else:
            # Halve the dense width until it drops below 64.
            sk = self.m
            while sk >= 64:
                x = Dense(sk, activation='relu')(x)
                sk = sk // 2
        outputs = Dense(self.l)(x)
        return Model(inputs=inputs, outputs=outputs)

    def summary(self):
        """Print the Keras model summary."""
        self.model.summary()

    def compile(self):
        """Compile with Adam (lr=0.001), the custom RMSE loss, MAE/MSE metrics."""
        self.model.compile(optimizer=Adam(learning_rate=0.001), loss=rmse, metrics=['mae', 'mse'])

    def fit(self, *args, **kwargs):
        # Thin delegation to Keras Model.fit.
        return self.model.fit(*args, **kwargs)

    def predict(self, *args, **kwargs):
        # Thin delegation to Keras Model.predict.
        return self.model.predict(*args, **kwargs)
# Training function
def train_model(timesteps=512, cells=128, attributes=8):
    """Train an AGRU on the pre-generated splits and plot the loss curves.

    Returns (model, history). Side effects: prints metrics and sample
    predictions, saves 'training_history_512_gru.png'.
    """
    # Data directory follows the timesteps parameter (was hard-coded default).
    X_train, X_test, y_train, y_test, mean_y, std_y = load_data(f'./data_{timesteps}')
    # Reshape X into 3D for GRU input: (samples, timesteps // attributes, attributes)
    X_train = X_train.reshape(-1, timesteps // attributes, attributes)
    X_test = X_test.reshape(-1, timesteps // attributes, attributes)
    model = AGRU(timesteps=timesteps, cells=cells, attributes=attributes)
    model.compile()
    model.summary()
    history = model.fit(
        X_train, y_train,
        validation_data=(X_test, y_test),
        epochs=50,
        batch_size=32,
        verbose=1,
        callbacks=callbacks
    )
    test_loss, test_mae, test_mse = model.model.evaluate(X_test, y_test, verbose=0)
    print(f"\nTest Loss (RMSE): {test_loss:.4f}, MAE: {test_mae:.4f}, MSE: {test_mse:.4f}")
    # Map standardized predictions back to the original target scale.
    y_pred_standardized = model.predict(X_test)
    y_pred = y_pred_standardized * std_y + mean_y
    y_true = y_test * std_y + mean_y
    # Guard against test sets smaller than 5 samples (was an IndexError risk).
    for i in range(min(5, len(y_pred))):
        print(f"Pred: {y_pred[i]}, Actual: {y_true[i]}")
    # Plot training history
    plt.figure(figsize=(10, 5))
    plt.plot(history.history['loss'], label='Train Loss')
    plt.plot(history.history['val_loss'], label='Validation Loss')
    plt.title('Training History')
    plt.xlabel('Epoch')
    plt.ylabel('Loss')
    plt.legend()
    plt.grid(True)
    plt.savefig('training_history_512_gru.png')
    plt.show()
    return model, history
if __name__ == "__main__":
    # Only draws the architecture diagram; the training run is commented out.
    #model, history = train_model(timesteps=512, cells=128, attributes=8)
    #model.plot()
    agru =AGRU(timesteps=512, cells=128, attributes=8)
    agru.plot()

+ 139
- 0
GRU/codes/WideGRU_512_cloud.py View File

@ -0,0 +1,139 @@
import tensorflow as tf
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import math
from tensorflow.keras.layers import GRU, Dense, Input
from tensorflow.keras.models import Model
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.callbacks import EarlyStopping, ModelCheckpoint
from tensorflow.keras.utils import plot_model
from tensorflow.keras import backend as K
# Custom RMSE loss function
def rmse(y_true, y_pred):
    """Root-mean-square error, used both as the loss and as a metric."""
    err = y_pred - y_true
    return K.sqrt(K.mean(K.square(err)))
# Callbacks for training: stop once val loss stalls, keep the best weights on disk.
early_stop = EarlyStopping(patience=5, restore_best_weights=True)
checkpoint = ModelCheckpoint('best_model.h5', save_best_only=True)
callbacks = [early_stop, checkpoint]
# Load data function (assumes 2D X and standardized y)
def load_data(data_dir='./data_512'):
    """Read the train/test CSV splits from *data_dir*.

    Returns (X_train, X_test, y_train, y_test, mean_y, std_y); the per-column
    mean/std of y_train are returned so predictions can be de-standardized.
    """
    def _read(name):
        return pd.read_csv(f'{data_dir}/{name}.csv').values
    X_train = _read('X_train')
    X_test = _read('X_test')
    y_train = _read('y_train')
    y_test = _read('y_test')
    # Training-target statistics for the inverse transform.
    mean_y = y_train.mean(axis=0)
    std_y = y_train.std(axis=0)
    return X_train, X_test, y_train, y_test, mean_y, std_y
# AGRU model class
class AGRU:
    """Stacked GRU regressor over windows of (timesteps // attributes) steps.

    512-timestep cloud variant: 4 GRU layers, dense funnel down to 64.
    """

    def plot(self):
        """Save an architecture diagram of the underlying Keras model."""
        print('Plotting model')  # fixed "Ploting" typo; no placeholders needed
        plot_model(self.model, to_file='gru_architecture_512c.jpeg', show_shapes=True, show_layer_names=True)

    @classmethod
    def log(cls, x):
        # Output-layer width: 2**(log2(x) - 5) == x / 32.
        return int(2 ** (math.log2(x) - 5))  # output = timesteps / 32

    def __init__(self, timesteps, cells, attributes=8, layers=4):
        # NOTE(review): asserts are stripped under -O; kept so callers that
        # catch AssertionError keep working.
        assert layers >= 2, "Number of GRU layers must be >= 2"
        assert cells >= 5, "Number of cells must be >= 5"
        assert timesteps in [64, 128, 256, 512], f"Timestep {timesteps} is not allowed"
        self.timesteps = timesteps
        self.n = layers      # number of GRU layers
        self.m = cells       # GRU units per layer
        self.k = attributes  # features per timestep
        self.l = AGRU.log(timesteps)  # output width (timesteps / 32)
        self.model = self._build_model()

    def _build_model(self):
        """Assemble the graph: GRU stack -> dense funnel -> linear head."""
        # Input window shaped (timesteps // attributes, attributes).
        inputs = Input(shape=(self.timesteps // self.k, self.k))
        x = inputs
        # All but the last GRU return sequences so layers stack.
        for _ in range(self.n - 1):
            x = GRU(self.m, return_sequences=True)(x)
        x = GRU(self.m)(x)
        if self.m <= 64:
            x = Dense(self.m, activation='relu')(x)
        else:
            # Halve the dense width until it drops below 64.
            sk = self.m
            while sk >= 64:
                x = Dense(sk, activation='relu')(x)
                sk = sk // 2
        outputs = Dense(self.l)(x)
        return Model(inputs=inputs, outputs=outputs)

    def summary(self):
        """Print the Keras model summary."""
        self.model.summary()

    def compile(self):
        """Compile with Adam (lr=0.0005), the custom RMSE loss, MAE/MSE metrics."""
        self.model.compile(optimizer=Adam(learning_rate=0.0005), loss=rmse, metrics=['mae', 'mse'])

    def fit(self, *args, **kwargs):
        # Thin delegation to Keras Model.fit.
        return self.model.fit(*args, **kwargs)

    def predict(self, *args, **kwargs):
        # Thin delegation to Keras Model.predict.
        return self.model.predict(*args, **kwargs)
# Training function
def train_model(timesteps=512, cells=1280, attributes=8):
    """Train an AGRU on the pre-generated splits and plot the loss curves.

    Returns (model, history). Side effects: prints metrics and sample
    predictions, saves 'training_history_512c.png'.
    """
    # Data directory follows the timesteps parameter (was hard-coded default).
    X_train, X_test, y_train, y_test, mean_y, std_y = load_data(f'./data_{timesteps}')
    # Reshape X into 3D for GRU input: (samples, timesteps // attributes, attributes)
    X_train = X_train.reshape(-1, timesteps // attributes, attributes)
    X_test = X_test.reshape(-1, timesteps // attributes, attributes)
    model = AGRU(timesteps=timesteps, cells=cells, attributes=attributes)
    model.compile()
    model.summary()
    history = model.fit(
        X_train, y_train,
        validation_data=(X_test, y_test),
        epochs=35,
        batch_size=32,
        verbose=1,
        callbacks=callbacks
    )
    test_loss, test_mae, test_mse = model.model.evaluate(X_test, y_test, verbose=0)
    print(f"\nTest Loss (RMSE): {test_loss:.4f}, MAE: {test_mae:.4f}, MSE: {test_mse:.4f}")
    # Map standardized predictions back to the original target scale.
    y_pred_standardized = model.predict(X_test)
    y_pred = y_pred_standardized * std_y + mean_y
    y_true = y_test * std_y + mean_y
    # Guard against test sets smaller than 5 samples (was an IndexError risk).
    for i in range(min(5, len(y_pred))):
        print(f"Pred: {y_pred[i]}, Actual: {y_true[i]}")
    # Plot training history
    plt.figure(figsize=(10, 5))
    plt.plot(history.history['loss'], label='Train Loss')
    plt.plot(history.history['val_loss'], label='Validation Loss')
    plt.title('Training History')
    plt.xlabel('Epoch')
    plt.ylabel('Loss')
    plt.legend()
    plt.grid(True)
    plt.savefig('training_history_512c.png')
    plt.show()
    return model, history
if __name__ == "__main__":
    # Entry point: train the 1280-cell cloud configuration on 512-value windows.
    model, history = train_model(timesteps=512, cells=1280, attributes=8)
    model.plot()

+ 142
- 0
GRU/codes/WideGRU_64.py View File

@ -0,0 +1,142 @@
import tensorflow as tf
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import math
from tensorflow.keras.layers import GRU, Dense, Input
from tensorflow.keras.models import Model
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.callbacks import EarlyStopping, ModelCheckpoint
from tensorflow.keras.utils import plot_model
from tensorflow.keras import backend as K
# Custom RMSE loss function
def rmse(y_true, y_pred):
    """Root-mean-square error, used both as the loss and as a metric."""
    err = y_pred - y_true
    return K.sqrt(K.mean(K.square(err)))
# Callbacks for training: stop once val loss stalls, keep the best weights on disk.
early_stop = EarlyStopping(patience=10, restore_best_weights=True)
checkpoint = ModelCheckpoint('best_model.h5', save_best_only=True)
callbacks = [early_stop, checkpoint]
# Load data function (assumes 2D X and standardized y)
def load_data(data_dir='./data_64'):
    """Read the train/test CSV splits from *data_dir*.

    Returns (X_train, X_test, y_train, y_test, mean_y, std_y); the per-column
    mean/std of y_train are returned so predictions can be de-standardized.
    """
    def _read(name):
        return pd.read_csv(f'{data_dir}/{name}.csv').values
    X_train = _read('X_train')
    X_test = _read('X_test')
    y_train = _read('y_train')
    y_test = _read('y_test')
    # Training-target statistics for the inverse transform.
    mean_y = y_train.mean(axis=0)
    std_y = y_train.std(axis=0)
    return X_train, X_test, y_train, y_test, mean_y, std_y
# AGRU model class
class AGRU:
    """Stacked GRU regressor over windows of (timesteps // attributes) steps.

    64-timestep variant: 2 GRU layers by default, dense funnel down to 128.
    """

    def plot(self):
        """Save an architecture diagram of the underlying Keras model."""
        print('Plotting model')  # fixed "Ploting" typo; no placeholders needed
        plot_model(self.model, to_file='gru_architecture_64.jpeg', show_shapes=True, show_layer_names=True)

    @classmethod
    def log(cls, x):
        # Output-layer width: 2**(log2(x) - 5) == x / 32.
        return int(2 ** (math.log2(x) - 5))  # output = timesteps / 32

    def __init__(self, timesteps, cells, attributes=8, layers=2):
        # NOTE(review): asserts are stripped under -O; kept so callers that
        # catch AssertionError keep working.
        assert layers >= 2, "Number of GRU layers must be >= 2"
        assert cells >= 5, "Number of cells must be >= 5"
        assert timesteps in [64, 128, 256, 512], f"Timestep {timesteps} is not allowed"
        self.timesteps = timesteps
        self.n = layers      # number of GRU layers
        self.m = cells       # GRU units per layer
        self.k = attributes  # features per timestep
        self.l = AGRU.log(timesteps)  # output width (timesteps / 32)
        self.model = self._build_model()

    def _build_model(self):
        """Assemble the graph: GRU stack -> dense funnel -> linear head."""
        # Input window shaped (timesteps // attributes, attributes).
        inputs = Input(shape=(self.timesteps // self.k, self.k))
        x = inputs
        # All but the last GRU return sequences so layers stack.
        for _ in range(self.n - 1):
            x = GRU(self.m, return_sequences=True)(x)
        x = GRU(self.m)(x)
        if self.m <= 128:
            x = Dense(self.m, activation='relu')(x)
        else:
            # Halve the dense width until it drops below 128.
            sk = self.m
            while sk >= 128:
                x = Dense(sk, activation='relu')(x)
                sk = sk // 2
        outputs = Dense(self.l)(x)
        return Model(inputs=inputs, outputs=outputs)

    def summary(self):
        """Print the Keras model summary."""
        self.model.summary()

    def compile(self):
        """Compile with Adam (lr=0.001), the custom RMSE loss, MAE/MSE metrics."""
        self.model.compile(optimizer=Adam(learning_rate=0.001), loss=rmse, metrics=['mae', 'mse'])

    def fit(self, *args, **kwargs):
        # Thin delegation to Keras Model.fit.
        return self.model.fit(*args, **kwargs)

    def predict(self, *args, **kwargs):
        # Thin delegation to Keras Model.predict.
        return self.model.predict(*args, **kwargs)
# Training function
def train_model(timesteps=64, cells=16, attributes=8):
    """Train an AGRU on the pre-generated splits and plot the loss curves.

    Returns (model, history). Side effects: prints metrics and sample
    predictions, saves 'training_history_64.png'.
    """
    # Data directory follows the timesteps parameter (was hard-coded default).
    X_train, X_test, y_train, y_test, mean_y, std_y = load_data(f'./data_{timesteps}')
    # Reshape X into 3D for GRU input: (samples, timesteps // attributes, attributes)
    X_train = X_train.reshape(-1, timesteps // attributes, attributes)
    X_test = X_test.reshape(-1, timesteps // attributes, attributes)
    model = AGRU(timesteps=timesteps, cells=cells, attributes=attributes)
    model.compile()
    model.summary()
    history = model.fit(
        X_train, y_train,
        validation_data=(X_test, y_test),
        epochs=50,
        batch_size=16,
        verbose=1,
        callbacks=callbacks
    )
    test_loss, test_mae, test_mse = model.model.evaluate(X_test, y_test, verbose=0)
    print(f"\nTest Loss (RMSE): {test_loss:.4f}, MAE: {test_mae:.4f}, MSE: {test_mse:.4f}")
    # Map standardized predictions back to the original target scale.
    y_pred_standardized = model.predict(X_test)
    y_pred = y_pred_standardized * std_y + mean_y
    y_true = y_test * std_y + mean_y
    # Guard against test sets smaller than 5 samples (was an IndexError risk).
    for i in range(min(5, len(y_pred))):
        print(f"Pred: {y_pred[i]}, Actual: {y_true[i]}")
    # Plot training history
    plt.figure(figsize=(10, 5))
    plt.plot(history.history['loss'], label='Train Loss')
    plt.plot(history.history['val_loss'], label='Validation Loss')
    plt.title('Training History')
    plt.xlabel('Epoch')
    plt.ylabel('Loss')
    plt.legend()
    plt.grid(True)
    plt.savefig('training_history_64.png')
    plt.show()
    return model, history
if __name__ == "__main__":
    # Entry point: train the small 16-cell model on 64-value windows.
    model, history = train_model(timesteps=64, cells=16, attributes=8)
    model.plot()
    #agru =AGRU(timesteps=64, cells=16, attributes=8)
    #agru.plot()

+ 139
- 0
GRU/codes/WideGRU_64_cloud.py View File

@ -0,0 +1,139 @@
import tensorflow as tf
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import math
from tensorflow.keras.layers import GRU, Dense, Input
from tensorflow.keras.models import Model
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.callbacks import EarlyStopping, ModelCheckpoint
from tensorflow.keras.utils import plot_model
from tensorflow.keras import backend as K
# Custom RMSE loss function
def rmse(y_true, y_pred):
    """Root-mean-square error, used both as the loss and as a metric."""
    err = y_pred - y_true
    return K.sqrt(K.mean(K.square(err)))
# Callbacks for training: stop once val loss stalls, keep the best weights on disk.
early_stop = EarlyStopping(patience=5, restore_best_weights=True)
checkpoint = ModelCheckpoint('best_model.h5', save_best_only=True)
callbacks = [early_stop, checkpoint]
# Load data function (assumes 2D X and standardized y)
def load_data(data_dir='./data_64'):
    """Read the train/test CSV splits from *data_dir*.

    Returns (X_train, X_test, y_train, y_test, mean_y, std_y); the per-column
    mean/std of y_train are returned so predictions can be de-standardized.
    """
    def _read(name):
        return pd.read_csv(f'{data_dir}/{name}.csv').values
    X_train = _read('X_train')
    X_test = _read('X_test')
    y_train = _read('y_train')
    y_test = _read('y_test')
    # Training-target statistics for the inverse transform.
    mean_y = y_train.mean(axis=0)
    std_y = y_train.std(axis=0)
    return X_train, X_test, y_train, y_test, mean_y, std_y
# AGRU model class
class AGRU:
    """Stacked GRU regressor over windows of (timesteps // attributes) steps.

    64-timestep cloud variant: 4 GRU layers, dense funnel down to 64.
    """

    def plot(self):
        """Save an architecture diagram of the underlying Keras model."""
        print('Plotting model')  # fixed "Ploting" typo; no placeholders needed
        plot_model(self.model, to_file='gru_architecture_64c.jpeg', show_shapes=True, show_layer_names=True)

    @classmethod
    def log(cls, x):
        # Output-layer width: 2**(log2(x) - 5) == x / 32.
        return int(2 ** (math.log2(x) - 5))  # output = timesteps / 32

    def __init__(self, timesteps, cells, attributes=8, layers=4):
        # NOTE(review): asserts are stripped under -O; kept so callers that
        # catch AssertionError keep working.
        assert layers >= 2, "Number of GRU layers must be >= 2"
        assert cells >= 5, "Number of cells must be >= 5"
        assert timesteps in [64, 128, 256, 512], f"Timestep {timesteps} is not allowed"
        self.timesteps = timesteps
        self.n = layers      # number of GRU layers
        self.m = cells       # GRU units per layer
        self.k = attributes  # features per timestep
        self.l = AGRU.log(timesteps)  # output width (timesteps / 32)
        self.model = self._build_model()

    def _build_model(self):
        """Assemble the graph: GRU stack -> dense funnel -> linear head."""
        # Input window shaped (timesteps // attributes, attributes).
        inputs = Input(shape=(self.timesteps // self.k, self.k))
        x = inputs
        # All but the last GRU return sequences so layers stack.
        for _ in range(self.n - 1):
            x = GRU(self.m, return_sequences=True)(x)
        x = GRU(self.m)(x)
        if self.m <= 64:
            x = Dense(self.m, activation='relu')(x)
        else:
            # Halve the dense width until it drops below 64.
            sk = self.m
            while sk >= 64:
                x = Dense(sk, activation='relu')(x)
                sk = sk // 2
        outputs = Dense(self.l)(x)
        return Model(inputs=inputs, outputs=outputs)

    def summary(self):
        """Print the Keras model summary."""
        self.model.summary()

    def compile(self):
        """Compile with Adam (lr=0.0007), the custom RMSE loss, MAE/MSE metrics."""
        self.model.compile(optimizer=Adam(learning_rate=0.0007), loss=rmse, metrics=['mae', 'mse'])

    def fit(self, *args, **kwargs):
        # Thin delegation to Keras Model.fit.
        return self.model.fit(*args, **kwargs)

    def predict(self, *args, **kwargs):
        # Thin delegation to Keras Model.predict.
        return self.model.predict(*args, **kwargs)
# Training function
def train_model(timesteps=64, cells=1280, attributes=8):
    """Train an AGRU on the pre-generated splits and plot the loss curves.

    Returns (model, history). Side effects: prints metrics and sample
    predictions, saves 'training_history_64c.png'.
    """
    # Data directory follows the timesteps parameter (was hard-coded default).
    X_train, X_test, y_train, y_test, mean_y, std_y = load_data(f'./data_{timesteps}')
    # Reshape X into 3D for GRU input: (samples, timesteps // attributes, attributes)
    X_train = X_train.reshape(-1, timesteps // attributes, attributes)
    X_test = X_test.reshape(-1, timesteps // attributes, attributes)
    model = AGRU(timesteps=timesteps, cells=cells, attributes=attributes)
    model.compile()
    model.summary()
    history = model.fit(
        X_train, y_train,
        validation_data=(X_test, y_test),
        epochs=30,
        batch_size=32,
        verbose=1,
        callbacks=callbacks
    )
    test_loss, test_mae, test_mse = model.model.evaluate(X_test, y_test, verbose=0)
    print(f"\nTest Loss (RMSE): {test_loss:.4f}, MAE: {test_mae:.4f}, MSE: {test_mse:.4f}")
    # Map standardized predictions back to the original target scale.
    y_pred_standardized = model.predict(X_test)
    y_pred = y_pred_standardized * std_y + mean_y
    y_true = y_test * std_y + mean_y
    # Guard against test sets smaller than 5 samples (was an IndexError risk).
    for i in range(min(5, len(y_pred))):
        print(f"Pred: {y_pred[i]}, Actual: {y_true[i]}")
    # Plot training history
    plt.figure(figsize=(10, 5))
    plt.plot(history.history['loss'], label='Train Loss')
    plt.plot(history.history['val_loss'], label='Validation Loss')
    plt.title('Training History')
    plt.xlabel('Epoch')
    plt.ylabel('Loss')
    plt.legend()
    plt.grid(True)
    plt.savefig('training_history_64c.png')
    plt.show()
    return model, history
if __name__ == "__main__":
    # Entry point: train the 1280-cell cloud configuration on 64-value windows.
    model, history = train_model(timesteps=64, cells=1280, attributes=8)
    model.plot()

+ 32068
- 0
data/all_data.csv
File diff suppressed because it is too large
View File


+ 32068
- 0
data/aqi_data.csv
File diff suppressed because it is too large
View File


+ 32068
- 0
data/complete_data.csv
File diff suppressed because it is too large
View File


+ 32068
- 0
data/input_data.csv
File diff suppressed because it is too large
View File


+ 32068
- 0
data/processed_aqi.csv
File diff suppressed because it is too large
View File


+ 32068
- 0
data/processed_dataset.csv
File diff suppressed because it is too large
View File


BIN
presentation/MARIA_PSAROPA_Presentation.pptx View File


+ 161
- 0
slideNN/codes/slideNN.py View File

@ -0,0 +1,161 @@
import os
os.environ['TF_CPP_MIN_LOG_LEVEL']='3'
import tensorflow as tf
from tensorflow.keras import Model
from tensorflow.keras.layers import Input, Dense
from tensorflow.keras.optimizers import SGD,Adam
from tensorflow.keras.utils import plot_model
import numpy as np
import pandas as pd
class slideNN:
    """Bundle of four halving-width MLP regressors (64/128/256/512 inputs)."""

    def plot(self):
        """Save an architecture diagram for each of the four models."""
        for el in range(4):
            print(f'Plotting model[{el}]')  # fixed "Ploting" typo
            plot_model(self.models[el], to_file=f'model_architecture_{el}.jpeg', show_shapes=True, show_layer_names=True)

    def __init__(self, lin_elements=(64,128,256,512), lout_elements=(2,4,8,16),nmodels=4):
        # Validate configuration; keeps the original print-and-exit behavior
        # expected by the script-style callers below.
        if len(lin_elements)!=nmodels:
            print(f' in tuple elements={lin_elements}!={nmodels}')
            exit(1)
        if len(lout_elements)!=nmodels:
            print(f' out tuple elements={lout_elements}!={nmodels}')
            exit(1)
        if nmodels!=4:
            print(f'Up to now only 4 models allowed!')
            exit(1)
        self.in_elements=lin_elements
        self.out_elements=lout_elements
        self.nmodels=nmodels
        self.models=[]
        for i in range(self.nmodels):
            self.build_model(self.in_elements[i],self.out_elements[i])

    def build_model(self,in_element, out_element):
        """Append an MLP whose hidden widths halve four times (e.g. 64->32->16->8->4)."""
        inputs=Input(shape=(in_element,))
        x=inputs
        for i in range(1,5):
            x=Dense(in_element//(2**i), activation='relu')(x)
        outputs=Dense(out_element)(x)
        model=Model(inputs=inputs, outputs=outputs)
        model._name="slideNN_"+str(in_element)
        self.models.append(model)

    def compile(self):
        """Compile all four models with Adam (lr=4e-4) and MSE loss."""
        for i in range(4):
            self.models[i].compile(
                optimizer=Adam(learning_rate=0.0004),  # before 0.002
                loss='mse',
                metrics=['mae', 'mse']
            )

    def summary(self):
        """Print the Keras summary of each model."""
        for i in range(4):
            self.models[i].summary()

    def train_i(self,X_train,y_train,i,epochs=300,batch_size=8,validation_split=0.2):
        """Train model *i* (one of the 4 models) and return its History."""
        # before epochs=300, batch_size=8
        return self.models[i].fit(
            X_train,
            y_train,
            epochs=epochs,
            batch_size=batch_size,
            validation_split=validation_split
        )

    def evaluate_i(self,X_test,y_test,i):
        """Evaluate model *i*; returns {'mae': ..., 'mse': ...}.

        BUG FIX: with loss='mse' and metrics=['mae','mse'], Keras evaluate()
        returns three values (loss, mae, mse); the old 2-way unpack raised
        ValueError on every call.
        """
        loss, mae, mse = self.models[i].evaluate(X_test,y_test)
        return {'mae':mae, 'mse':mse}

    def save_model(self,filepath):
        """Save all four models under a new directory *filepath*.

        Exits if the directory exists; returns False on a save failure.
        """
        if os.path.exists(filepath):
            print(f' Path {filepath} exists!')
            exit(1)
        else:
            os.mkdir(filepath)
        try:
            for i in range(4):
                # Layout: <filepath>/<filepath>_<input-width>
                self.models[i].save(filepath+os.sep+filepath+"_"+str(self.in_elements[i]))
                print(f'Model saved to {filepath}_{self.in_elements[i]}')
        except Exception as e:
            print("Save"+str(e))
            return False

    @staticmethod
    def load_model(filepath):
        """Load the four models written by save_model; returns None on failure."""
        if os.path.exists(filepath):
            try:
                modelw=slideNN()
                in_elements=(64,128,256,512)
                for i in range(4):
                    print(f'Loading file:{filepath}')
                    modelw.models[i]=tf.keras.models.load_model(filepath+os.sep+filepath+"_"+str(in_elements[i]))
                modelw.compile()
                return modelw
            except Exception as e:
                print ("Load:"+str(e))
        return None
# --- Script: build the four models, train model 0, report error in real AQI units ---
x=slideNN()
x.summary()
x.compile()
x.plot()
X_train=pd.read_csv("reshaped_input_64.csv")
# NOTE(review): magic row cap — presumably matches the y CSV's row count; confirm.
X_train=X_train[:3206]
X_train_1=X_train.values
Y_train=pd.read_csv("reshaped_aqi_2.csv")
Y_train_1=Y_train.values
print (X_train_1.shape)
print (Y_train_1.shape)
x.train_i(X_train_1,Y_train_1,0)
# After training is complete
from sklearn.metrics import mean_squared_error
import joblib
# Load the scaler you saved
aqi_scaler = joblib.load("aqi_scaler.save")
print("Loaded scaler mean:", aqi_scaler.mean_)
print("Loaded scaler scale:", aqi_scaler.scale_)
# Get predictions from the model
predictions = x.models[0].predict(X_train_1)
# Inverse transform predictions and true values.
# (Dropped the dead `predictions_reshaped` variable: it was computed but never
# used — model.predict already returns a 2D array here.)
pred_aqi_real = aqi_scaler.inverse_transform(predictions)
true_aqi_real = aqi_scaler.inverse_transform(Y_train_1)
# Print some examples (guard against < 5 rows)
for i in range(min(5, len(pred_aqi_real))):
    print(f"Predicted AQI: {pred_aqi_real[i]}, Actual AQI: {true_aqi_real[i]}")
# Evaluate error in real AQI values
mse_real = mean_squared_error(true_aqi_real, pred_aqi_real)
print(f"Mean Squared Error in real AQI values: {mse_real}")

+ 222
- 0
slideNN/codes/slideNN_1.py View File

@ -0,0 +1,222 @@
import os
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
import tensorflow as tf
from tensorflow.keras import Model
from tensorflow.keras.layers import Input, Dense
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.utils import plot_model
import numpy as np
import pandas as pd
from sklearn.metrics import mean_squared_error
from sklearn.metrics import r2_score
import joblib
import matplotlib.pyplot as plt
#new
from tensorflow.keras import regularizers
from tensorflow.keras.callbacks import EarlyStopping
from tensorflow.keras.layers import Dropout, BatchNormalization, Activation
#Define RMSE loss/metric
def rmse(y_true, y_pred):
    """Root-mean-square error, used as both loss and metric."""
    err = y_true - y_pred
    return tf.sqrt(tf.reduce_mean(tf.square(err)))
class slideNN:
def plot(self):
for el in range(4):
print(f'Ploting model[{el}]')
plot_model(self.models[el], to_file=f'model_architecture_{el}.jpeg', show_shapes=True, show_layer_names=True,dpi=150)
def __init__(self, lin_elements=(64, 128, 256, 512), lout_elements=(2, 4, 8, 16), nmodels=4):
if len(lin_elements) != nmodels:
print(f'in tuple elements={lin_elements}!={nmodels}')
exit(1)
if len(lout_elements) != nmodels:
print(f'out tuple elements={lout_elements}!={nmodels}')
exit(1)
if nmodels != 4:
print('Up to now only 4 models allowed!')
exit(1)
self.in_elements = lin_elements
self.out_elements = lout_elements
self.nmodels = nmodels
self.models = []
for i in range(self.nmodels):
self.build_model(self.in_elements[i], self.out_elements[i])
def build_model(self, in_element, out_element):
inputs = Input(shape=(in_element,))
x = np.empty(shape=(5,), dtype=object)
x[0] = inputs
for i in range(1, 5):
for i in range(1, 5):
x[i] = Dense(in_element // (2 ** i), activation=None)(x[i - 1]) # No activation here
#x[i] = BatchNormalization()(x[i]) # Add Batch Norm
x[i] = Activation('relu')(x[i]) # Then apply activation
#x[i] = Dropout(0.4)(x[i])
outputs = Dense(out_element)(x[4])
model = Model(inputs=inputs, outputs=outputs)
model._name = "slideNN_" + str(in_element)
self.models.append(model)
def compile(self):
for i in range(4):
self.models[i].compile(
optimizer=Adam(learning_rate=0.0005),
loss=rmse,
metrics=[
'mae',
rmse
]
)
def summary(self):
for i in range(4):
self.models[i].summary()
    def train_i(self, X_train, y_train, i, epochs=400, batch_size=16, validation_split=0.2, window_size=10):
        """Train model *i*, plot moving-average-smoothed loss curves, return History.

        Saves the plot to plots/training_history_<input-width>nn.png.
        """
        history = self.models[i].fit(
            X_train,
            y_train,
            epochs=epochs,
            batch_size=batch_size,
            validation_split=validation_split,
            shuffle = False
        )
        # Local helper: smooths a 1-D series for plotting only; returns the
        # input unchanged when it is shorter than the window.
        def moving_average(data, window_size=5):
            if len(data) < window_size:
                return data
            return np.convolve(data, np.ones(window_size) / window_size, mode='valid')
        # Smooth losses using moving average
        train_loss_smooth = moving_average(history.history['loss'], window_size=window_size)
        val_loss_smooth = moving_average(history.history['val_loss'], window_size=window_size) if 'val_loss' in history.history else None
        # Plot smoothed curves
        plt.figure(figsize=(10, 5))
        plt.plot(train_loss_smooth, label='Train Loss', linewidth=2)
        if val_loss_smooth is not None:
            plt.plot(val_loss_smooth, label='Validation Loss', linewidth=2)
        plt.title(f'Training History - Model {i} ({self.in_elements[i]}nn)')
        plt.xlabel('Epoch')
        plt.ylabel('Loss')
        plt.legend()
        plt.grid(True)
        # Y ticks every 0.15 between the rounded axis limits.
        # NOTE(review): the 0.15 step looks ad hoc — confirm it is intentional.
        ymin, ymax = plt.ylim()
        yticks = np.arange(np.floor(ymin * 10) / 10, np.ceil(ymax * 10) / 10 + 0.15, 0.15)
        plt.yticks(yticks)
        # Save and show
        os.makedirs("plots", exist_ok=True)
        save_path = os.path.join("plots", f"training_history_{self.in_elements[i]}nn.png")
        plt.savefig(save_path)
        print(f"Saved training plot to {save_path}")
        plt.show()
        return history
def evaluate_i(self, X_test, y_test, i):
results = self.models[i].evaluate(X_test, y_test)
return dict(zip(self.models[i].metrics_names, results))
def save_model(self, filepath):
if os.path.exists(filepath):
print(f'Path {filepath} exists!')
exit(1)
else:
os.mkdir(filepath)
try:
for i in range(4):
self.models[i].save(filepath + os.sep + filepath + "_" + str(self.in_elements[i]))
print(f'Model saved to {filepath}_{self.in_elements[i]}')
except Exception as e:
print("Save " + str(e))
return False
@staticmethod
def load_model(filepath):
if os.path.exists(filepath):
try:
modelw = slideNN()
in_elements = (64, 128, 256, 512)
for i in range(4):
print(f'Loading file: {filepath}')
modelw.models[i] = tf.keras.models.load_model(
filepath + os.sep + filepath + "_" + str(in_elements[i]),
custom_objects={'rmse': rmse}
)
modelw.compile()
return modelw
except Exception as e:
print("Load: " + str(e))
return None
# --- Driver: build, compile, train model 0 (64 inputs -> 2 AQI outputs) ---
x = slideNN()
x.summary()
x.compile()
x.plot()
#Load data
# NOTE(review): the [:3206] slice presumably trims the inputs to the number
# of target rows in reshaped_aqi_2.csv -- confirm against the preprocessing.
X_train = pd.read_csv("reshaped_input_64.csv")[:3206]
Y_train = pd.read_csv("reshaped_aqi_2.csv")
X_train_1 = X_train.values
Y_train_1 = Y_train.values
print(X_train_1.shape)
print(Y_train_1.shape)
# Train model 0
x.train_i(X_train_1, Y_train_1, 0)
# Load scaler (StandardScaler fitted on AQI during preprocessing)
aqi_scaler = joblib.load("aqi_scaler.save")
print("Loaded scaler mean:", aqi_scaler.mean_)
print("Loaded scaler scale:", aqi_scaler.scale_)
# Predict and inverse transform back to real AQI units
# NOTE(review): predictions/evaluation use the training data itself.
predictions = x.models[0].predict(X_train_1)
pred_aqi_real = aqi_scaler.inverse_transform(predictions)
true_aqi_real = aqi_scaler.inverse_transform(Y_train_1)
# Preview the first five rows in standardized and real units.
print(pd.DataFrame({
    "Pred (std)": [predictions[i] for i in range(5)],
    "True (std)": [Y_train_1[i] for i in range(5)],
    "Pred (real)": [pred_aqi_real[i] for i in range(5)],
    "True (real)": [true_aqi_real[i] for i in range(5)],
}))
# Evaluate R2 in original scale
#r2_real = r2_score(true_aqi_real, pred_aqi_real)
#print(f"R² in real AQI values: {r2_real}")
#Evaluate RMSE in original scale
mse_real = mean_squared_error(true_aqi_real, pred_aqi_real)
rmse_real = np.sqrt(mse_real)
print(f"RMSE in real AQI values: {rmse_real}")
# Evaluate RMSE in standardized scale
mse_std = mean_squared_error(Y_train_1, predictions)
rmse_std = np.sqrt(mse_std)
print(f"RMSE in standardized AQI values: {rmse_std}")
# Append this run's result to the shared results CSV.
model_result = {
    "input_size": 64,
    "output_size": 2,
    "rmse_standardized": rmse_std,
}
results_file = "model_results.csv"
# Check if the file exists
if os.path.exists(results_file):
    df = pd.read_csv(results_file)
    df = pd.concat([df, pd.DataFrame([model_result])], ignore_index=True)
else:
    df = pd.DataFrame([model_result])
# Save (overwrite) the file
df.to_csv(results_file, index=False)

+ 213
- 0
slideNN/codes/slideNN_2.py View File

@ -0,0 +1,213 @@
import os
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
import tensorflow as tf
from tensorflow.keras import Model
from tensorflow.keras.layers import Input, Dense
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.utils import plot_model
import numpy as np
import pandas as pd
from sklearn.metrics import mean_squared_error
from sklearn.metrics import r2_score
import joblib
import matplotlib.pyplot as plt
#Define RMSE loss/metric
def rmse(y_true, y_pred):
    """Root-mean-squared error between two tensors (used as loss and metric)."""
    squared_error = tf.square(y_true - y_pred)
    return tf.sqrt(tf.reduce_mean(squared_error))
class slideNN:
    """Container for four halving-width feed-forward AQI regressors.

    Model i maps lin_elements[i] input features to lout_elements[i]
    outputs through four ReLU Dense layers whose widths halve each step.
    """
    def plot(self):
        """Render each model's architecture diagram to a JPEG file."""
        for el in range(4):
            print(f'Ploting model[{el}]')
            plot_model(self.models[el], to_file=f'model_architecture_{el}.jpeg', show_shapes=True, show_layer_names=True, dpi=150)
    def __init__(self, lin_elements=(64, 128, 256, 512), lout_elements=(2, 4, 8, 16), nmodels=4):
        """Build nmodels networks sized by the input/output width tuples."""
        # Guard: the width tuples must match the requested model count.
        if len(lin_elements) != nmodels:
            print(f'in tuple elements={lin_elements}!={nmodels}')
            exit(1)
        if len(lout_elements) != nmodels:
            print(f'out tuple elements={lout_elements}!={nmodels}')
            exit(1)
        # Only the 4-model configuration is supported for now.
        if nmodels != 4:
            print('Up to now only 4 models allowed!')
            exit(1)
        self.in_elements = lin_elements
        self.out_elements = lout_elements
        self.nmodels = nmodels
        self.models = []
        for i in range(self.nmodels):
            self.build_model(self.in_elements[i], self.out_elements[i])
    def build_model(self, in_element, out_element):
        """Append one funnel MLP (in, in/2, in/4, in/8, in/16 -> out)."""
        inputs = Input(shape=(in_element,))
        # x[k] holds the symbolic tensor after hidden layer k (x[0] = input).
        x = np.empty(shape=(5,), dtype=object)
        x[0] = inputs
        for i in range(1, 5):
            x[i] = Dense(in_element // (2 ** i), activation='relu')(x[i-1])
        outputs = Dense(out_element)(x[4])  # linear head (regression)
        model = Model(inputs=inputs, outputs=outputs)
        model._name = "slideNN_" + str(in_element)  # NOTE: sets a private Keras attribute
        self.models.append(model)
    def compile(self):
        """Compile every model with Adam(8e-4) and the custom RMSE loss."""
        for i in range(4):
            self.models[i].compile(
                optimizer=Adam(learning_rate=0.0008),
                loss=rmse,
                metrics=[
                    'mae',
                    'mse',
                    rmse
                ]
            )
    def summary(self):
        """Print the Keras summary of every model."""
        for i in range(4):
            self.models[i].summary()
    def train_i(self, X_train, y_train, i, epochs=400, batch_size=16, validation_split=0.2):
        """Train model i, save an EMA-smoothed loss plot, return the History.

        shuffle=False keeps the temporal order of the series when Keras
        carves off the validation split.
        """
        history = self.models[i].fit(
            X_train,
            y_train,
            epochs=epochs,
            batch_size=batch_size,
            validation_split=validation_split,
            shuffle=False
        )
        def smooth_curve(points, factor=0.9):
            # Exponential moving average of the per-epoch loss values.
            smoothed = []
            for point in points:
                if smoothed:
                    smoothed.append(smoothed[-1] * factor + point * (1 - factor))
                else:
                    smoothed.append(point)
            return smoothed
        # Smooth losses
        train_loss_smooth = smooth_curve(history.history['loss'], factor=0.9)
        val_loss_smooth = smooth_curve(history.history['val_loss'], factor=0.9) if 'val_loss' in history.history else None
        # Plot only smoothed curves
        plt.figure(figsize=(10, 5))
        plt.plot(train_loss_smooth, label='Train Loss', linewidth=2)
        if val_loss_smooth:
            plt.plot(val_loss_smooth, label='Validation Loss', linewidth=2)
        plt.title(f'Training History - Model {i} ({self.in_elements[i]}nn)')
        plt.xlabel('Epoch')
        plt.ylabel('Loss')
        plt.legend()
        plt.grid(True)
        # Save and show
        os.makedirs("plots", exist_ok=True)
        save_path = os.path.join("plots", f"training_history_{self.in_elements[i]}nn.png")
        plt.savefig(save_path)
        print(f"Saved training plot to {save_path}")
        plt.show()
        return history
    def evaluate_i(self, X_test, y_test, i):
        """Evaluate model i on (X_test, y_test); return {metric_name: value}."""
        results = self.models[i].evaluate(X_test, y_test)
        return dict(zip(self.models[i].metrics_names, results))
    def save_model(self, filepath):
        """Save all models under a new directory named filepath.

        Aborts if the directory already exists; returns False on a save error.
        """
        if os.path.exists(filepath):
            print(f'Path {filepath} exists!')
            exit(1)
        else:
            os.mkdir(filepath)
        try:
            for i in range(4):
                self.models[i].save(filepath + os.sep + filepath + "_" + str(self.in_elements[i]))
                print(f'Model saved to {filepath}_{self.in_elements[i]}')
        except Exception as e:
            print("Save " + str(e))
            return False
    @staticmethod
    def load_model(filepath):
        """Load a saved 4-model bundle from filepath; return None on failure."""
        if os.path.exists(filepath):
            try:
                modelw = slideNN()
                in_elements = (64, 128, 256, 512)
                for i in range(4):
                    print(f'Loading file: {filepath}')
                    modelw.models[i] = tf.keras.models.load_model(
                        filepath + os.sep + filepath + "_" + str(in_elements[i]),
                        custom_objects={'rmse': rmse}
                    )
                modelw.compile()
                return modelw
            except Exception as e:
                print("Load: " + str(e))
        return None
# --- Driver: build, compile, train model 1 (128 inputs -> 4 AQI outputs) ---
x = slideNN()
x.summary()
x.compile()
x.plot()
#Load data
# NOTE(review): the [:1603] slice presumably trims the inputs to the number
# of target rows in reshaped_aqi_4.csv -- confirm against the preprocessing.
X_train = pd.read_csv("reshaped_input_128.csv")[:1603]
Y_train = pd.read_csv("reshaped_aqi_4.csv")
X_train_2 = X_train.values
Y_train_2 = Y_train.values
print(X_train_2.shape)
print(Y_train_2.shape)
# Train model 1
x.train_i(X_train_2, Y_train_2, 1)
# Load scaler (StandardScaler fitted on AQI during preprocessing)
aqi_scaler = joblib.load("aqi_scaler.save")
print("Loaded scaler mean:", aqi_scaler.mean_)
print("Loaded scaler scale:", aqi_scaler.scale_)
# Predict and inverse transform back to real AQI units
# NOTE(review): predictions/evaluation use the training data itself.
predictions = x.models[1].predict(X_train_2)
pred_aqi_real = aqi_scaler.inverse_transform(predictions)
true_aqi_real = aqi_scaler.inverse_transform(Y_train_2)
# Preview the first five rows in standardized and real units.
print(pd.DataFrame({
    "Pred (std)": [predictions[i] for i in range(5)],
    "True (std)": [Y_train_2[i] for i in range(5)],
    "Pred (real)": [pred_aqi_real[i] for i in range(5)],
    "True (real)": [true_aqi_real[i] for i in range(5)],
}))
# Evaluate R2 in original scale
#r2_real = r2_score(true_aqi_real, pred_aqi_real)
#print(f"R² in real AQI values: {r2_real}")
#Evaluate RMSE in original scale
mse_real = mean_squared_error(true_aqi_real, pred_aqi_real)
rmse_real = np.sqrt(mse_real)
print(f"RMSE in real AQI values: {rmse_real}")
# Evaluate RMSE in standardized scale
mse_std = mean_squared_error(Y_train_2, predictions)
rmse_std = np.sqrt(mse_std)
print(f"RMSE in standardized AQI values: {rmse_std}")
# Append this run's result to the shared results CSV.
model_result = {
    "input_size": 128,
    "output_size": 4,
    "rmse_standardized": rmse_std,
}
results_file = "model_results.csv"
# Check if the file exists
if os.path.exists(results_file):
    df = pd.read_csv(results_file)
    df = pd.concat([df, pd.DataFrame([model_result])], ignore_index=True)
else:
    df = pd.DataFrame([model_result])
# Save (overwrite) the file
df.to_csv(results_file, index=False)

+ 210
- 0
slideNN/codes/slideNN_3.py View File

@ -0,0 +1,210 @@
import os
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
import tensorflow as tf
from tensorflow.keras import Model
from tensorflow.keras.layers import Input, Dense
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.utils import plot_model
import numpy as np
import pandas as pd
from sklearn.metrics import mean_squared_error
from sklearn.metrics import r2_score
import joblib
import matplotlib.pyplot as plt
#Define RMSE loss/metric
def rmse(y_true, y_pred):
    """Root-mean-squared error between two tensors (used as loss and metric)."""
    squared_error = tf.square(y_true - y_pred)
    return tf.sqrt(tf.reduce_mean(squared_error))
class slideNN:
    """Container for four halving-width feed-forward AQI regressors.

    Model i maps lin_elements[i] input features to lout_elements[i]
    outputs through four ReLU Dense layers whose widths halve each step.
    """
    def plot(self):
        """Render each model's architecture diagram to a JPEG file."""
        for el in range(4):
            print(f'Ploting model[{el}]')
            plot_model(self.models[el], to_file=f'model_architecture_{el}.jpeg', show_shapes=True, show_layer_names=True, dpi=150)
    def __init__(self, lin_elements=(64, 128, 256, 512), lout_elements=(2, 4, 8, 16), nmodels=4):
        """Build nmodels networks sized by the input/output width tuples."""
        # Guard: the width tuples must match the requested model count.
        if len(lin_elements) != nmodels:
            print(f'in tuple elements={lin_elements}!={nmodels}')
            exit(1)
        if len(lout_elements) != nmodels:
            print(f'out tuple elements={lout_elements}!={nmodels}')
            exit(1)
        # Only the 4-model configuration is supported for now.
        if nmodels != 4:
            print('Up to now only 4 models allowed!')
            exit(1)
        self.in_elements = lin_elements
        self.out_elements = lout_elements
        self.nmodels = nmodels
        self.models = []
        for i in range(self.nmodels):
            self.build_model(self.in_elements[i], self.out_elements[i])
    def build_model(self, in_element, out_element):
        """Append one funnel MLP (in, in/2, in/4, in/8, in/16 -> out)."""
        inputs = Input(shape=(in_element,))
        # x[k] holds the symbolic tensor after hidden layer k (x[0] = input).
        x = np.empty(shape=(5,), dtype=object)
        x[0] = inputs
        for i in range(1, 5):
            x[i] = Dense(in_element // (2 ** i), activation='relu')(x[i-1])
        outputs = Dense(out_element)(x[4])  # linear head (regression)
        model = Model(inputs=inputs, outputs=outputs)
        model._name = "slideNN_" + str(in_element)  # NOTE: sets a private Keras attribute
        self.models.append(model)
    def compile(self):
        """Compile every model with Adam(1e-4) and the custom RMSE loss."""
        for i in range(4):
            self.models[i].compile(
                optimizer=Adam(learning_rate=0.0001),
                loss=rmse,
                metrics=[
                    'mae',
                    'mse',
                    rmse
                ]
            )
    def summary(self):
        """Print the Keras summary of every model."""
        for i in range(4):
            self.models[i].summary()
    def train_i(self, X_train, y_train, i, epochs=400, batch_size=16, validation_split=0.2, window_size=7):
        """Train model i, save a smoothed loss plot, and return the History.

        shuffle=False keeps the temporal order of the series when Keras
        carves off the validation split.
        """
        history = self.models[i].fit(
            X_train,
            y_train,
            epochs=epochs,
            batch_size=batch_size,
            validation_split=validation_split,
            shuffle=False
        )
        def moving_average(data, window_size=7):
            # Simple moving average; short histories are returned as-is.
            if len(data) < window_size:
                return data
            return np.convolve(data, np.ones(window_size) / window_size, mode='valid')
        # Smooth losses using moving average
        train_loss_smooth = moving_average(history.history['loss'], window_size=window_size)
        val_loss_smooth = moving_average(history.history['val_loss'], window_size=window_size) if 'val_loss' in history.history else None
        # Plot smoothed curves
        plt.figure(figsize=(10, 5))
        plt.plot(train_loss_smooth, label='Train Loss', linewidth=2)
        if val_loss_smooth is not None:
            plt.plot(val_loss_smooth, label='Validation Loss', linewidth=2)
        plt.title(f'Training History - Model {i} ({self.in_elements[i]}nn)')
        plt.xlabel('Epoch')
        plt.ylabel('Loss')
        plt.legend()
        plt.grid(True)
        # Save and show
        os.makedirs("plots", exist_ok=True)
        save_path = os.path.join("plots", f"training_history_{self.in_elements[i]}nn.png")
        plt.savefig(save_path)
        print(f"Saved training plot to {save_path}")
        plt.show()
        return history
    def evaluate_i(self, X_test, y_test, i):
        """Evaluate model i on (X_test, y_test); return {metric_name: value}."""
        results = self.models[i].evaluate(X_test, y_test)
        return dict(zip(self.models[i].metrics_names, results))
    def save_model(self, filepath):
        """Save all models under a new directory named filepath.

        Aborts if the directory already exists; returns False on a save error.
        """
        if os.path.exists(filepath):
            print(f'Path {filepath} exists!')
            exit(1)
        else:
            os.mkdir(filepath)
        try:
            for i in range(4):
                self.models[i].save(filepath + os.sep + filepath + "_" + str(self.in_elements[i]))
                print(f'Model saved to {filepath}_{self.in_elements[i]}')
        except Exception as e:
            print("Save " + str(e))
            return False
    @staticmethod
    def load_model(filepath):
        """Load a saved 4-model bundle from filepath; return None on failure."""
        if os.path.exists(filepath):
            try:
                modelw = slideNN()
                in_elements = (64, 128, 256, 512)
                for i in range(4):
                    print(f'Loading file: {filepath}')
                    modelw.models[i] = tf.keras.models.load_model(
                        filepath + os.sep + filepath + "_" + str(in_elements[i]),
                        custom_objects={'rmse': rmse}
                    )
                modelw.compile()
                return modelw
            except Exception as e:
                print("Load: " + str(e))
        return None
# --- Driver: build, compile, train model 2 (256 inputs -> 8 AQI outputs) ---
x = slideNN()
x.summary()
x.compile()
x.plot()
#Load data
# NOTE(review): the [:801] slice presumably trims the inputs to the number
# of target rows in reshaped_aqi_8.csv -- confirm against the preprocessing.
X_train = pd.read_csv("reshaped_input_256.csv")[:801]
Y_train = pd.read_csv("reshaped_aqi_8.csv")
X_train_3 = X_train.values
Y_train_3 = Y_train.values
print(X_train_3.shape)
print(Y_train_3.shape)
# Train model 2
x.train_i(X_train_3, Y_train_3, 2)
# Load scaler (StandardScaler fitted on AQI during preprocessing)
aqi_scaler = joblib.load("aqi_scaler.save")
print("Loaded scaler mean:", aqi_scaler.mean_)
print("Loaded scaler scale:", aqi_scaler.scale_)
# Predict and inverse transform back to real AQI units
# NOTE(review): predictions/evaluation use the training data itself.
predictions = x.models[2].predict(X_train_3)
pred_aqi_real = aqi_scaler.inverse_transform(predictions)
true_aqi_real = aqi_scaler.inverse_transform(Y_train_3)
# Preview the first five rows in standardized and real units.
print(pd.DataFrame({
    "Pred (std)": [predictions[i] for i in range(5)],
    "True (std)": [Y_train_3[i] for i in range(5)],
    "Pred (real)": [pred_aqi_real[i] for i in range(5)],
    "True (real)": [true_aqi_real[i] for i in range(5)],
}))
# Evaluate R2 in original scale
#r2_real = r2_score(true_aqi_real, pred_aqi_real)
#print(f"R² in real AQI values: {r2_real}")
#Evaluate RMSE in original scale
mse_real = mean_squared_error(true_aqi_real, pred_aqi_real)
rmse_real = np.sqrt(mse_real)
print(f"RMSE in real AQI values: {rmse_real}")
# Evaluate RMSE in standardized scale
mse_std = mean_squared_error(Y_train_3, predictions)
rmse_std = np.sqrt(mse_std)
print(f"RMSE in standardized AQI values: {rmse_std}")
# Append this run's result to the shared results CSV.
model_result = {
    "input_size": 256,
    "output_size": 8,
    "rmse_standardized": rmse_std,
}
results_file = "model_results.csv"
# Check if the file exists
if os.path.exists(results_file):
    df = pd.read_csv(results_file)
    df = pd.concat([df, pd.DataFrame([model_result])], ignore_index=True)
else:
    df = pd.DataFrame([model_result])
# Save (overwrite) the file
df.to_csv(results_file, index=False)

+ 214
- 0
slideNN/codes/slideNN_4.py View File

@ -0,0 +1,214 @@
import os
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
import tensorflow as tf
from tensorflow.keras import Model
from tensorflow.keras.layers import Input, Dense
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.utils import plot_model
import numpy as np
import pandas as pd
from sklearn.metrics import mean_squared_error
from sklearn.metrics import r2_score
import joblib
import matplotlib.pyplot as plt
#Define RMSE loss/metric
def rmse(y_true, y_pred):
    """Root-mean-squared error between two tensors (used as loss and metric)."""
    squared_error = tf.square(y_true - y_pred)
    return tf.sqrt(tf.reduce_mean(squared_error))
class slideNN:
    """Container for four halving-width feed-forward AQI regressors.

    Model i maps lin_elements[i] input features to lout_elements[i]
    outputs through four ReLU Dense layers whose widths halve each step.
    """
    def plot(self):
        """Render each model's architecture diagram to a JPEG file."""
        for el in range(4):
            print(f'Ploting model[{el}]')
            plot_model(self.models[el], to_file=f'model_architecture_{el}.jpeg', show_shapes=True, show_layer_names=True, dpi=150)
    def __init__(self, lin_elements=(64, 128, 256, 512), lout_elements=(2, 4, 8, 16), nmodels=4):
        """Build nmodels networks sized by the input/output width tuples."""
        # Guard: the width tuples must match the requested model count.
        if len(lin_elements) != nmodels:
            print(f'in tuple elements={lin_elements}!={nmodels}')
            exit(1)
        if len(lout_elements) != nmodels:
            print(f'out tuple elements={lout_elements}!={nmodels}')
            exit(1)
        # Only the 4-model configuration is supported for now.
        if nmodels != 4:
            print('Up to now only 4 models allowed!')
            exit(1)
        self.in_elements = lin_elements
        self.out_elements = lout_elements
        self.nmodels = nmodels
        self.models = []
        for i in range(self.nmodels):
            self.build_model(self.in_elements[i], self.out_elements[i])
    def build_model(self, in_element, out_element):
        """Append one funnel MLP (in, in/2, in/4, in/8, in/16 -> out)."""
        inputs = Input(shape=(in_element,))
        # x[k] holds the symbolic tensor after hidden layer k (x[0] = input).
        x = np.empty(shape=(5,), dtype=object)
        x[0] = inputs
        for i in range(1, 5):
            x[i] = Dense(in_element // (2 ** i), activation='relu')(x[i-1])
        outputs = Dense(out_element)(x[4])  # linear head (regression)
        model = Model(inputs=inputs, outputs=outputs)
        model._name = "slideNN_" + str(in_element)  # NOTE: sets a private Keras attribute
        self.models.append(model)
    def compile(self):
        """Compile every model with Adam(1e-4) and the custom RMSE loss."""
        for i in range(4):
            self.models[i].compile(
                optimizer=Adam(learning_rate=0.0001),
                loss=rmse,
                metrics=[
                    'mae',
                    'mse',
                    rmse
                ]
            )
    def summary(self):
        """Print the Keras summary of every model."""
        for i in range(4):
            self.models[i].summary()
    def train_i(self, X_train, y_train, i, epochs=400, batch_size=16, validation_split=0.2, window_size=10):
        """Train model i, save a smoothed loss plot, and return the History.

        shuffle=False keeps the temporal order of the series when Keras
        carves off the validation split.
        """
        history = self.models[i].fit(
            X_train,
            y_train,
            epochs=epochs,
            batch_size=batch_size,
            validation_split=validation_split,
            shuffle=False
        )
        def moving_average(data, window_size=10):
            # Simple moving average; short histories are returned as-is.
            if len(data) < window_size:
                return data
            return np.convolve(data, np.ones(window_size) / window_size, mode='valid')
        # Smooth losses using moving average
        train_loss_smooth = moving_average(history.history['loss'], window_size=window_size)
        val_loss_smooth = moving_average(history.history['val_loss'], window_size=window_size) if 'val_loss' in history.history else None
        # Plot smoothed curves
        plt.figure(figsize=(10, 5))
        plt.plot(train_loss_smooth, label='Train Loss', linewidth=2)
        if val_loss_smooth is not None:
            plt.plot(val_loss_smooth, label='Validation Loss', linewidth=2)
        plt.title(f'Training History - Model {i} ({self.in_elements[i]}nn)')
        plt.xlabel('Epoch')
        plt.ylabel('Loss')
        plt.legend()
        plt.grid(True)
        # Tick the loss axis every 0.2, rounded outward to 0.1 boundaries.
        ymin, ymax = plt.ylim()
        yticks = np.arange(np.floor(ymin * 10) / 10, np.ceil(ymax * 10) / 10 + 0.2, 0.2)
        plt.yticks(yticks)
        # Save and show
        os.makedirs("plots", exist_ok=True)
        save_path = os.path.join("plots", f"training_history_{self.in_elements[i]}nn.png")
        plt.savefig(save_path)
        print(f"Saved training plot to {save_path}")
        plt.show()
        return history
    def evaluate_i(self, X_test, y_test, i):
        """Evaluate model i on (X_test, y_test); return {metric_name: value}."""
        results = self.models[i].evaluate(X_test, y_test)
        return dict(zip(self.models[i].metrics_names, results))
    def save_model(self, filepath):
        """Save all models under a new directory named filepath.

        Aborts if the directory already exists; returns False on a save error.
        """
        if os.path.exists(filepath):
            print(f'Path {filepath} exists!')
            exit(1)
        else:
            os.mkdir(filepath)
        try:
            for i in range(4):
                self.models[i].save(filepath + os.sep + filepath + "_" + str(self.in_elements[i]))
                print(f'Model saved to {filepath}_{self.in_elements[i]}')
        except Exception as e:
            print("Save " + str(e))
            return False
    @staticmethod
    def load_model(filepath):
        """Load a saved 4-model bundle from filepath; return None on failure."""
        if os.path.exists(filepath):
            try:
                modelw = slideNN()
                in_elements = (64, 128, 256, 512)
                for i in range(4):
                    print(f'Loading file: {filepath}')
                    modelw.models[i] = tf.keras.models.load_model(
                        filepath + os.sep + filepath + "_" + str(in_elements[i]),
                        custom_objects={'rmse': rmse}
                    )
                modelw.compile()
                return modelw
            except Exception as e:
                print("Load: " + str(e))
        return None
# --- Driver: build, compile, train model 3 (512 inputs -> 16 AQI outputs) ---
x = slideNN()
x.summary()
x.compile()
x.plot()
#Load data
# NOTE(review): the [:400] slice presumably trims the inputs to the number
# of target rows in reshaped_aqi_16.csv -- confirm against the preprocessing.
X_train = pd.read_csv("reshaped_input_512.csv")[:400]
Y_train = pd.read_csv("reshaped_aqi_16.csv")
X_train_4 = X_train.values
Y_train_4 = Y_train.values
print(X_train_4.shape)
print(Y_train_4.shape)
# Train model 3
x.train_i(X_train_4, Y_train_4, 3)
# Load scaler (StandardScaler fitted on AQI during preprocessing)
aqi_scaler = joblib.load("aqi_scaler.save")
print("Loaded scaler mean:", aqi_scaler.mean_)
print("Loaded scaler scale:", aqi_scaler.scale_)
# Predict and inverse transform back to real AQI units
# NOTE(review): predictions/evaluation use the training data itself.
predictions = x.models[3].predict(X_train_4)
pred_aqi_real = aqi_scaler.inverse_transform(predictions)
true_aqi_real = aqi_scaler.inverse_transform(Y_train_4)
# Preview the first five rows in standardized and real units.
print(pd.DataFrame({
    "Pred (std)": [predictions[i] for i in range(5)],
    "True (std)": [Y_train_4[i] for i in range(5)],
    "Pred (real)": [pred_aqi_real[i] for i in range(5)],
    "True (real)": [true_aqi_real[i] for i in range(5)],
}))
# Evaluate R2 in original scale
#r2_real = r2_score(true_aqi_real, pred_aqi_real)
#print(f"R² in real AQI values: {r2_real}")
#Evaluate RMSE in original scale
mse_real = mean_squared_error(true_aqi_real, pred_aqi_real)
rmse_real = np.sqrt(mse_real)
print(f"RMSE in real AQI values: {rmse_real}")
# Evaluate RMSE in standardized scale
mse_std = mean_squared_error(Y_train_4, predictions)
rmse_std = np.sqrt(mse_std)
print(f"RMSE in standardized AQI values: {rmse_std}")
# Append this run's result to the shared results CSV.
model_result = {
    "input_size": 512,
    "output_size": 16,
    "rmse_standardized": rmse_std,
}
results_file = "model_results.csv"
# Check if the file exists
if os.path.exists(results_file):
    df = pd.read_csv(results_file)
    df = pd.concat([df, pd.DataFrame([model_result])], ignore_index=True)
else:
    df = pd.DataFrame([model_result])
# Save (overwrite) the file
df.to_csv(results_file, index=False)

+ 19
- 0
slideNN/slideNN_data/preprocessing/dataframes.py View File

@ -0,0 +1,19 @@
import pandas as pd
# Split all_data.csv into a feature CSV and an AQI-target CSV.
# Load the original dataset
df = pd.read_csv("all_data.csv")
df = df.round(3)  # keep three decimals to shrink the output files
# Define columns for input data and AQI data
input_columns = ["PM1", "PM2.5", "PM10", "PM4", "Temp", "Hum", "WindSpd", "WindDir"]  # Adjust as needed
aqi_columns = ["AQI"]  # Adjust this based on your dataset
# Create separate DataFrames
df_input = df[input_columns]
df_aqi = df[aqi_columns]
# Save to CSV
df_input.to_csv("input_data.csv", index=False)
df_aqi.to_csv("aqi_data.csv", index=False)
print("Files saved: input_data.csv and aqi_data.csv")

+ 30
- 0
slideNN/slideNN_data/preprocessing/model_graph.py View File

@ -0,0 +1,30 @@
from graphviz import Digraph
def generate_model_graph(rows, features_per_row=8):
    """Render a Graphviz diagram of the reshape step for one model size.

    Shows `rows` input rows of `features_per_row` features being reshaped
    into a single 1-D vector of rows*features_per_row elements, and saves
    the rendered PNG as model_<elements>.png.

    BUG FIX: the completion message printed the literal text "(unknown)"
    instead of the rendered file name.
    """
    elements = rows * features_per_row
    model_name = f"Model: {rows} Rows → {elements} Elements"
    g = Digraph(name=f'Model{elements}', format='png')
    g.attr(rankdir='TB')  # top to bottom layout for better title display
    g.attr('node', fontname='Helvetica', style='filled')
    # Header node (acts as a title)
    g.node('header', model_name, shape='plaintext', style='', fontname='Helvetica-Bold', fontsize='16')
    # Diagram content
    g.node('rows', f'{rows} Rows x {features_per_row} Features = {elements}', fillcolor='aliceblue')
    g.node('reshape', 'Reshape', shape='oval', fillcolor='darkorange')
    g.node('vector', f'1D Input Vector ({elements})', fillcolor='lightyellow')
    # Create edges (the invisible edge keeps the title above the diagram)
    g.edge('header', 'rows', style='invis')
    g.edge('rows', 'reshape')
    g.edge('reshape', 'vector')
    filename = f'model_{elements}'
    g.render(filename=filename, cleanup=True)
    print(f"Graph saved as {filename}.png")
# Generate the reshape diagram for each of the four model sizes.
for row_count in (8, 16, 32, 64):
    generate_model_graph(rows=row_count)

+ 48
- 0
slideNN/slideNN_data/preprocessing/preprocess_data.py View File

@ -0,0 +1,48 @@
import pandas as pd
from sklearn.preprocessing import StandardScaler, MinMaxScaler
import joblib
# Scale the feature and AQI CSVs and persist the fitted AQI scaler.
data = pd.read_csv("input_data.csv")
aqi_data = pd.read_csv("aqi_data.csv")
# 1. Split the data into particulate-matter and meteorological columns.
pm_cols = ['PM1', 'PM2.5', 'PM4', 'PM10']
meteo_cols = ['Temp', 'Hum', 'WindSpd', 'WindDir']
pm_data = data[pm_cols]
meteo_data = data[meteo_cols]
# 2. Standardize PM data (zero mean, unit variance)
pm_scaler = StandardScaler()
pm_standardized = pd.DataFrame(pm_scaler.fit_transform(pm_data), columns=pm_cols)
# 3. Standardize environmental data
meteo_scaler = StandardScaler()
meteo_standardized = meteo_scaler.fit_transform(meteo_data)
# 4. Normalize the standardized environmental data to [0, 1]
# NOTE(review): double scaling (standardize then min-max) appears intentional
# -- confirm this is the desired treatment for the meteo features.
meteo_normalizer = MinMaxScaler()
meteo_standardized_normalized = pd.DataFrame(meteo_normalizer.fit_transform(meteo_standardized), columns=meteo_cols)
# 5. Recombine if needed
final_data = pd.concat([pm_standardized, meteo_standardized_normalized], axis=1)
final_data.to_csv("processed_dataset.csv", index=False)
aqi_cols = ['AQI']
aqi_data = aqi_data[aqi_cols]
# Standardize AQI
aqi_scaler = StandardScaler()
aqi_standardized = aqi_scaler.fit_transform(aqi_data)
# Save it (the training scripts reload this scaler to invert predictions)
joblib.dump(aqi_scaler, "aqi_scaler.save")
print("Fitted scaler mean:", aqi_scaler.mean_)
print("Fitted scaler scale:", aqi_scaler.scale_)
# Optionally, convert back to DataFrame
aqi_standardized_df = pd.DataFrame(aqi_standardized, columns=aqi_cols)
aqi_standardized_df.to_csv("processed_aqi.csv", index=False)

+ 68
- 0
slideNN/slideNN_data/preprocessing/reshaping_aqi.py View File

@ -0,0 +1,68 @@
import pandas as pd
df = pd.read_csv('processed_aqi.csv')
def reshape_aqi_2(frame=None):
    """Group the standardized AQI series into 2-wide target rows.

    Starting at index 8 and stepping by 10, each complete window
    [AQI[i], AQI[i+1]] becomes one row; incomplete windows are dropped.
    Writes reshaped_aqi_2.csv and returns the reshaped DataFrame.

    frame: optional DataFrame with an "AQI" column; defaults to the
    module-level df loaded from processed_aqi.csv (backward compatible).
    """
    source = df if frame is None else frame
    values = source["AQI"].tolist()
    # Slice idiom replaces the element-by-element row construction.
    rows = [values[i:i + 2] for i in range(8, len(values) - 1, 10)]
    reshaped_df = pd.DataFrame(rows, columns=['AQI_1', 'AQI_2'])
    reshaped_df.to_csv('reshaped_aqi_2.csv', index=False)
    print(reshaped_df.shape)
    return reshaped_df
def reshape_aqi_4(frame=None):
    """Group the standardized AQI series into 4-wide target rows.

    Starting at index 16 and stepping by 20, each complete window
    AQI[i:i+4] becomes one row; incomplete windows are dropped.
    Writes reshaped_aqi_4.csv and returns the reshaped DataFrame.

    frame: optional DataFrame with an "AQI" column; defaults to the
    module-level df loaded from processed_aqi.csv (backward compatible).
    """
    source = df if frame is None else frame
    values = source["AQI"].tolist()
    # Slice idiom replaces the element-by-element row construction.
    rows = [values[i:i + 4] for i in range(16, len(values) - 3, 20)]
    reshaped_df = pd.DataFrame(rows, columns=['AQI_1', 'AQI_2', 'AQI_3', 'AQI_4'])
    reshaped_df.to_csv('reshaped_aqi_4.csv', index=False)
    print(reshaped_df.shape)
    return reshaped_df
def reshape_aqi_8(frame=None):
    """Group the standardized AQI series into 8-wide target rows.

    Starting at index 32 and stepping by 40, each complete window
    AQI[i:i+8] becomes one row; incomplete windows are dropped.
    Writes reshaped_aqi_8.csv and returns the reshaped DataFrame.

    frame: optional DataFrame with an "AQI" column; defaults to the
    module-level df loaded from processed_aqi.csv (backward compatible).
    """
    source = df if frame is None else frame
    values = source["AQI"].tolist()
    # Slice idiom replaces the 8-element explicit row construction.
    rows = [values[i:i + 8] for i in range(32, len(values) - 7, 40)]
    reshaped_df = pd.DataFrame(rows, columns=[f'AQI_{k}' for k in range(1, 9)])
    reshaped_df.to_csv('reshaped_aqi_8.csv', index=False)
    print(reshaped_df.shape)
    return reshaped_df
def reshape_aqi_16(frame=None):
    """Group the standardized AQI series into 16-wide target rows.

    Starting at index 64 and stepping by 80, each complete window
    AQI[i:i+16] becomes one row; incomplete windows are dropped.
    Writes reshaped_aqi_16.csv and returns the reshaped DataFrame.

    frame: optional DataFrame with an "AQI" column; defaults to the
    module-level df loaded from processed_aqi.csv (backward compatible).
    """
    source = df if frame is None else frame
    values = source["AQI"].tolist()
    # Slice idiom replaces the 16-element explicit row construction.
    rows = [values[i:i + 16] for i in range(64, len(values) - 15, 80)]
    reshaped_df = pd.DataFrame(rows, columns=[f'AQI_{k}' for k in range(1, 17)])
    reshaped_df.to_csv('reshaped_aqi_16.csv', index=False)
    print(reshaped_df.shape)
    return reshaped_df
# Produce all four AQI target layouts.
for reshape_fn in (reshape_aqi_2, reshape_aqi_4, reshape_aqi_8, reshape_aqi_16):
    reshape_fn()

+ 49
- 0
slideNN/slideNN_data/preprocessing/reshaping_models_input.py View File

@ -0,0 +1,49 @@
import pandas as pd
import numpy as np
df = pd.read_csv("processed_dataset.csv")
def reshape_input_64(frame=None):
    """Re-pack the processed feature table into rows of 64 values.

    Flattens the table row-major and regroups consecutive values into
    64-wide rows, dropping any incomplete trailing row.  The original
    hard-coded the total element count (256536); it is now derived from
    the data so differently sized inputs are handled.  Writes
    reshaped_input_64.csv and returns the reshaped DataFrame.

    frame: optional DataFrame; defaults to the module-level df loaded
    from processed_dataset.csv (backward compatible).
    """
    source = df if frame is None else frame
    flat = source.values.flatten()  # row-major 1-D view of all features
    valid_rows = flat.size // 64
    flat = flat[:valid_rows * 64]  # drop the incomplete trailing row
    reshaped_df = pd.DataFrame(flat.reshape(valid_rows, 64))
    reshaped_df.to_csv("reshaped_input_64.csv", index=False)
    print(reshaped_df.shape)
    return reshaped_df
def reshape_input_128(frame=None):
    """Re-pack the processed feature table into rows of 128 values.

    Flattens the table row-major and regroups consecutive values into
    128-wide rows, dropping any incomplete trailing row.  The original
    hard-coded the total element count (256536); it is now derived from
    the data so differently sized inputs are handled.  Writes
    reshaped_input_128.csv and returns the reshaped DataFrame.

    frame: optional DataFrame; defaults to the module-level df loaded
    from processed_dataset.csv (backward compatible).
    """
    source = df if frame is None else frame
    flat = source.values.flatten()  # row-major 1-D view of all features
    valid_rows = flat.size // 128
    flat = flat[:valid_rows * 128]  # drop the incomplete trailing row
    reshaped_df = pd.DataFrame(flat.reshape(valid_rows, 128))
    reshaped_df.to_csv("reshaped_input_128.csv", index=False)
    print(reshaped_df.shape)
    return reshaped_df
def reshape_input_256(frame=None):
    """Re-pack the processed feature table into rows of 256 values.

    Flattens the table row-major and regroups consecutive values into
    256-wide rows, dropping any incomplete trailing row.  The original
    hard-coded the total element count (256536); it is now derived from
    the data so differently sized inputs are handled.  Writes
    reshaped_input_256.csv and returns the reshaped DataFrame.

    frame: optional DataFrame; defaults to the module-level df loaded
    from processed_dataset.csv (backward compatible).
    """
    source = df if frame is None else frame
    flat = source.values.flatten()  # row-major 1-D view of all features
    valid_rows = flat.size // 256
    flat = flat[:valid_rows * 256]  # drop the incomplete trailing row
    reshaped_df = pd.DataFrame(flat.reshape(valid_rows, 256))
    reshaped_df.to_csv("reshaped_input_256.csv", index=False)
    print(reshaped_df.shape)
    return reshaped_df
def reshape_input_512(frame=None):
    """Re-pack the processed feature table into rows of 512 values.

    Flattens the table row-major and regroups consecutive values into
    512-wide rows, dropping any incomplete trailing row.  The original
    hard-coded the total element count (256536); it is now derived from
    the data so differently sized inputs are handled.  Writes
    reshaped_input_512.csv and returns the reshaped DataFrame.

    frame: optional DataFrame; defaults to the module-level df loaded
    from processed_dataset.csv (backward compatible).
    """
    source = df if frame is None else frame
    flat = source.values.flatten()  # row-major 1-D view of all features
    valid_rows = flat.size // 512
    flat = flat[:valid_rows * 512]  # drop the incomplete trailing row
    reshaped_df = pd.DataFrame(flat.reshape(valid_rows, 512))
    reshaped_df.to_csv("reshaped_input_512.csv", index=False)
    print(reshaped_df.shape)
    return reshaped_df
# Produce all four flattened input layouts.
for reshape_fn in (reshape_input_64, reshape_input_128, reshape_input_256, reshape_input_512):
    reshape_fn()

Loading…
Cancel
Save