Disabled external gits

2022-04-07 18:46:57 +02:00
parent 88cb3426ad
commit 15e7120d6d
5316 changed files with 4563444 additions and 6 deletions

View File

@@ -0,0 +1,834 @@
v 3.06161699786838e-17 -7.49879891330929e-33 0.5
v 3.06161699786838e-17 -7.49879891330929e-33 -0.5
v 0.154508497187474 -3.78436673043416e-17 -0.475528258147577
v 0.293892626146237 -7.19829327805997e-17 -0.404508497187474
v 0.404508497187474 -9.90760072617093e-17 -0.293892626146236
v 0.475528258147577 -1.16470831848909e-16 -0.154508497187473
v 0.5 -1.22464679914735e-16 7.6571373978539e-16
v 0.475528258147577 -1.16470831848909e-16 0.154508497187474
v 0.404508497187474 -9.90760072617092e-17 0.293892626146237
v 0.293892626146237 -7.19829327805998e-17 0.404508497187473
v 0.154508497187475 -3.78436673043417e-17 0.475528258147576
v 0.21857645514496 0.136716465258129 0.428407447861946
v 0.475945187701446 -0.132880263668928 0.0762824608354488
v 0.218576349120797 -0.134908613951094 -0.428980239040003
v 0.228603425820446 -0.140038091834834 0.422054269660211
v 0.437104447096958 0.151793486569612 0.189468835862535
v 0.42898023903994 0.134908613950884 -0.21857634912105
v 0.428980239039898 -0.134908613951062 -0.218576349121023
v 0.218576349121032 0.134908613951053 -0.428980239039896
v 0.340440645253916 0.134908613951044 0.34044064525392
v 0.476615030125843 -0.131924241455095 -0.0737163996315581
v 0.407622547196207 -0.259520994885 -0.128423955051913
v 0.338436818269637 -0.252043139096244 -0.26820659215246
v 0.340989631871085 -0.116746524471516 -0.34654915954341
v 0.075316277025205 -0.134908613951047 0.475528258147578
v 0.0753162770252058 0.134908613951052 -0.475528258147577
v 0.337051887252752 0.13152434625125 -0.345104870499591
v 0.342307229899869 0.259749321291958 -0.255648294433281
v 0.235302886190634 0.257758938760451 0.358040334654413
v 0.0930553185121102 0.241091096498632 0.428037137273972
v 0.414887348894352 0.253233345682864 -0.117223548665273
v 0.309219636002729 0.365205559163796 -0.144941768535501
v 0.116382730487621 0.357335962703667 0.329797013029059
v 0.220295554503833 0.356044147287606 -0.273317459830304
v 0.173924737961772 0.443407381211481 -0.152118637293744
v 0.0757439138278267 0.415836842579461 -0.2671003179923
v 0.0224933467184133 0.480041055177656 -0.138038526134294
v 0.248065266401332 0.358015947011543 0.245536566096914
v 0.138163331821839 0.43047956133561 0.21353744639334
v 0.360552014587397 0.346405942121308 -0.00227333235984791
v 0.123093945281709 0.484207258358844 -0.0197790694319915
v -0.0375624502146155 0.498585389330104 0.0012930121510933
v -0.0319756224028659 0.329253812136088 0.374925975048146
v -0.0571707300985015 0.188277472409031 0.459655415505214
v 0.44634465869394 0.222444950782125 0.0359817943817138
v 0.114071738612042 0.30750614917439 -0.377395822274126
v 0.236387555749274 0.430565411758525 0.0934577427723124
v 0.378115930351658 0.292151103254715 0.147227973161872
v 0.0698331513994325 -0.131524346251195 -0.477309833660296
v -0.0276507157238025 0.368077837732622 -0.337274581446689
v -0.132019323454869 0.46789161623721 -0.116826083086996
v -0.178527337205295 0.268217150323689 0.382344805303572
v -0.195982573985885 0.117715178528912 0.444672876885665
v -0.188769871137012 0.462097824846099 0.0288363663352296
v -0.311118332326446 0.185953595932789 0.344422187801651
v 0.298513275017753 -0.364576673892107 -0.167253321318123
v -0.27446477920923 0.408828269442573 -0.0867671082739308
v -0.321076581736832 0.0337877006396921 0.381796044958135
v -0.406068994209582 0.0890512487956863 0.27780901178606
v -0.396337226815336 -0.0658166787739775 0.297632268807486
v 0.137344602759695 -0.267387515925555 0.399550217644719
v -0.469353412437567 -0.00745235730436951 0.172197086513736
v -0.438850244007533 -0.16046355744979 0.17793793880195
v -0.488260481987745 -0.0978280168539874 0.0450708425427184
v -0.433483128474169 -0.244453321072111 0.0483213321946162
v -0.460620100758965 -0.173287974513347 -0.0883198769579221
v -0.383511806626786 -0.309356779000242 -0.08495338406711
v -0.336717499334961 -0.365993326701377 0.0516740791080927
v -0.267862281691792 -0.414575366652434 -0.0798565176542698
v -0.205085219904653 -0.452251021719178 0.0583872069088724
v -0.389473197418456 -0.226083984285031 -0.217247924138414
v -0.445256364809781 -0.0799966822341631 -0.212949055945092
v -0.00351125947165275 -0.267035886479874 0.422704987418922
v -0.353169743739217 -0.125114465368412 -0.331085340452809
v -0.38892388844103 0.0261181792321889 -0.313139026174485
v -0.278756909608118 -0.262391732725563 -0.321628922740179
v -0.249429844449192 -0.39750217428021 0.172559480007787
v -0.125203314206269 -0.478452051724907 -0.0735375027587528
v -0.0528582364531956 -0.493487744300911 0.0606288138087549
v 0.271731582933658 -0.249600298700644 0.337433901267964
v -0.495847117939593 0.0542719818547096 0.034499095880349
v 0.341636271151409 -0.364235836672196 -0.0248377438078891
v -0.278221380393658 -0.0165162196036634 -0.415114535979952
v -0.385894465893779 0.231366911979078 0.218070661102819
v 0.129422773211733 -0.26368230955868 -0.404624993543385
v 0.17079787798835 -0.360453290963367 0.301498772648173
v -0.2972397651044 0.134475553784439 -0.378899521608367
v -0.390729424978202 0.17308135749756 -0.259563788196669
v 0.0302401087170613 -0.494617483324727 -0.0666264287973931
v -0.0447104151313644 -0.455911463185833 -0.200363960118914
v -0.289781305004565 -0.107480642258869 0.393032704503674
v -0.28072748874216 0.276043192337753 -0.308208100199367
v 0.106085899784673 -0.483055958177403 0.0735032185430484
v -0.143288452953271 0.380504725979635 0.291006138693707
v -0.0207720062787277 -0.262149359393134 -0.425260199319106
v -0.0793282266177742 -0.112493174960849 0.480679017691543
v -0.210229488803243 0.104747954203238 -0.441397131988602
v 0.435024352571936 -0.127806035969131 0.210759174982314
v 0.298839864283654 -0.34288662805342 0.207661974894498
v -0.343961566185834 -0.213169748618088 0.293681969588004
v -0.217656630917657 -0.259665634918282 0.367694641054313
v -0.318981501523703 0.381763369474201 0.0500752574969736
v -0.395445063483211 0.303503289973959 -0.0388452666581461
v 0.00695685172784659 -0.466794760772503 0.179037017204205
v -0.141786434074944 -0.0639250457860653 -0.475194902785753
v 0.123376202006679 -0.445805073774975 -0.189831896621448
v 0.0273264572598231 -0.36207385125428 -0.343737968475292
v -0.179178868825853 0.231344360320625 -0.40543152308883
v -0.131442519100517 -0.37228536331636 -0.30680037880178
v 0.213344976802497 -0.44944368426887 -0.0498427080320272
v -0.095411435088729 0.468568967697982 0.146081417588813
v 0.19556911033449 -0.349389679263868 -0.299465482329545
v -0.212579734450761 0.392914303643436 -0.22456225527287
v 0.00442729082563762 0.431790532829453 0.252066131907487
v 0.251435921428911 -0.424020659066107 0.0835850351458869
v -0.0716881267417996 0.0362403081699758 0.493505271043782
v 0.409702957808806 -0.250532734704164 0.139200701163351
v 0.354048851093961 -0.123591021755264 0.33071841554491
v -0.409329654955812 0.269763307055223 0.0983717019288848
v -0.23775056881332 0.409432492346691 0.160747321096556
v -0.280167779637764 0.310303261210272 0.274258821799984
v 0.0390199999029228 -0.340176969429353 0.364358434892671
v -0.106955495739533 -0.389774683020182 0.294340310538035
v -0.125152025350624 -0.457070129870627 0.159448634144933
v 0.250205349609497 0.430527105380925 -0.0452072401182732
v -0.15862733630032 -0.0449406159687499 0.472035707563119
v -0.372118237460504 -0.283526014116421 0.176468174662014
v -0.235245742246689 -0.152187819676347 -0.41412354231191
v 0.479986141812706 0.124680814448665 -0.063780860578784
v -0.0761042573074939 0.431963531953512 -0.240032600039915
v -0.288453021308633 -0.353652776154199 -0.204265925734934
v -0.458840137015611 0.057463941494015 -0.190167358112447
v -0.444083105517519 0.194920415588484 -0.121639742603844
v -0.178055249060845 -0.43316536366806 -0.175111667230364
v -0.491963173165518 -0.0298523928793253 -0.0841490991531884
v -0.45636138284715 0.138396610027018 0.150268648023688
v -0.345904715972311 0.313415014736247 -0.179223201639723
v -0.153169726530236 -0.269052072762942 -0.392619430258638
v -0.0419923946510265 0.170189522618945 -0.468265058681734
v 0.243612070418092 0.241981955824921 -0.363452737232445
v 0.338867576345089 0.252678472264692 0.2670624559087
v 0.426863022631355 -0.25994025908482 0.0147993789376581
v 0.158342102182598 -0.434609327079221 0.18984865417522
v 0.0452931243905481 0.0952764475019849 0.488744239285072
v -0.237259629761916 0.0166159415076913 0.439808797744033
v 0.247175666187241 -0.238473344419651 -0.363365730423495
v 0.0577997827759178 0.48434516267528 0.10985876616872
v -0.334770924239934 0.332574086299653 0.165296416795679
v -0.153183582101622 0.339412359720881 -0.333667559470828
v 0.36785960116632 -0.228792320684729 0.249666553277471
v 0.483390586525237 0.102332156736947 0.0765615475049495
v 0.139648641361451 0.213951152339616 -0.429794324507027
v -0.0358864034291634 0.302032201639968 -0.396848478920397
v -0.180786711862885 -0.175443810495746 0.431897712627121
v 0.348215026659532 -0.346723777603642 0.0923521372398145
v -0.114480413553308 -0.290654614949134 0.390402522688358
v -0.272442803422386 -0.321472772094441 0.269128548588341
v -0.466114498296331 0.180818120755528 0.00648703972541844
v -0.129415184584831 -0.203585571998795 -0.437955048918034
v 0.0643376427947934 -0.412575814735467 0.275030661590622
v -0.485934856252322 0.0906364933467189 -0.0751820560562512
v -2.77555756156289e-16 3.19189119579733e-16 -1.2490009027033e-16
v -0.251828742757562 0.0139490906784806 -0.0181338406388946
v 0.197996662720562 -0.0791211777945037 0.136432807235424
v 0.125255777218953 0.210831018463626 -0.063969097850248
v -0.139628236563409 -0.21265335644906 0.00545121255949739
v -0.145223690886062 0.189924594351579 0.11159305462841
v -0.112897563504534 -0.0373976158431084 -0.223966661594476
v 0.225528258147577 -0.0232431701377701 -0.114912387106339
v -0.118530098445147 -0.0689806932344922 0.2117841676383
v 0.098269738040353 0.147959392484917 0.178021817259433
v 0.100852235573853 -0.219450649709681 -0.0387973024559412
v -0.100480207820675 0.192284757140571 -0.148991345058649
v 0.08807336311451 0.0516996280513727 -0.271061939721184
v 0.272113071694785 0.101244013250397 0.0512575837900759
v 0.0341720627218518 -0.221571915156144 0.170600381517002
v 0.113793744063312 -0.150466918879953 -0.232830800332848
v 0.068592399702004 -0.0245878691620408 0.288679790541826
v -0.0741210186337665 0.111594614690448 0.278407708187608
v -0.0351094203418499 0.30351870487887 -0.00261521319733849
v -0.258447343925187 0.0495111872633636 0.167086499211952
v -0.271439309774019 -0.125349807194904 0.104052998028547
v -0.0485204373172835 -0.250891695571267 -0.169649174254392
v -0.245408905954284 -0.137064807392418 -0.132337064396614
v -0.247561324014027 0.19563057757812 -0.0400712992459122
v -0.245489451032576 0.0855397985783712 -0.187709278356085
v 0.27254656245515 -0.16399276001755 -0.00766125806031415
v 0.273716986928946 0.149504817170547 -0.136480234333616
v 0.0525334922800396 0.245214764318482 -0.222591039627852
v -0.025453222260026 0.265072731295802 0.212780231418037
v 0.10188342337719 0.303246953287615 0.102540160700209
v -0.14157478784597 -0.241776139413833 0.180194715613577
v -0.0121617817639016 -0.325829668630181 0.0394634260162139
v 0.222619980200421 0.0438990087026778 0.254181668630204
v 0.187862004819913 -0.26556009247797 0.101876795667186
v 0.342239361396159 -0.0424148965243595 0.0738730045314498
v -0.120466338945889 0.114580505991094 -0.297963959751455
v 0.15154406307463 -0.163641254596307 0.268108582070898
f 166 170 163 182
f 165 162 171 180
f 167 171 162 180
f 4 5 27 169
f 163 166 162 170
f 4 24 169 177
f 167 173 163 185
f 162 163 167 173
f 158 136 181 185
f 162 171 170 178
f 162 163 170 181
f 162 167 163 181
f 4 169 27 174
f 170 171 162 179
f 162 173 165 174
f 170 178 171 179
f 158 181 163 185
f 162 176 164 178
f 162 170 167 181
f 162 170 176 178
f 162 166 163 184
f 167 170 162 179
f 129 169 6 196
f 162 165 169 174
f 73 176 170 178
f 119 181 136 185
f 4 169 174 177
f 72 132 135 163
f 27 174 169 188
f 158 119 136 185
f 64 66 163 182
f 167 162 171 179
f 66 135 64 163
f 7 129 6 196
f 162 173 167 180
f 95 177 168 183
f 162 177 172 183
f 4 24 5 169
f 27 169 5 188
f 162 169 165 175
f 162 164 169 175
f 129 175 169 196
f 168 177 162 183
f 132 161 135 163
f 119 148 84 167
f 162 165 173 180
f 164 162 171 175
f 165 171 162 175
f 158 81 161 163
f 30 171 178 179
f 64 81 62 163
f 169 162 174 177
f 66 163 182 184
f 169 177 23 187
f 162 170 166 176
f 54 167 102 185
f 68 166 67 182
f 162 168 163 173
f 164 171 162 178
f 161 158 163 185
f 65 68 67 182
f 166 168 162 183
f 129 6 169 188
f 162 163 168 184
f 165 175 40 191
f 129 169 175 188
f 119 167 84 181
f 62 64 163 182
f 18 169 23 187
f 67 182 166 184
f 162 168 166 184
f 168 174 162 177
f 164 162 169 172
f 167 181 119 185
f 126 178 170 179
f 72 163 135 184
f 72 132 163 186
f 54 180 167 185
f 62 163 81 181
f 162 168 173 174
f 66 135 163 184
f 165 174 173 189
f 38 141 171 191
f 140 174 188 189
f 161 163 132 186
f 24 23 18 169
f 62 163 181 182
f 65 182 67 184
f 166 162 172 183
f 140 174 27 188
f 162 166 172 176
f 4 14 24 177
f 23 169 24 177
f 169 172 162 177
f 163 173 168 186
f 60 181 170 182
f 119 148 167 185
f 57 173 51 180
f 72 163 184 186
f 41 35 165 180
f 41 37 35 180
f 141 171 48 175
f 56 177 172 187
f 135 161 81 163
f 95 49 168 177
f 102 54 120 167
f 34 140 188 189
f 176 178 73 198
f 162 172 164 176
f 141 171 175 194
f 60 170 91 182
f 130 37 180 189
f 56 23 177 187
f 22 18 23 187
f 121 55 167 179
f 6 169 21 196
f 60 91 100 182
f 12 30 171 178
f 65 66 182 184
f 91 170 100 182
f 164 172 169 187
f 116 178 126 179
f 125 165 40 191
f 69 67 166 184
f 96 73 170 178
f 113 51 57 173
f 57 173 180 185
f 136 119 84 181
f 84 167 121 181
f 83 75 74 186
f 134 131 69 183
f 167 179 55 181
f 130 180 173 189
f 145 179 170 181
f 48 171 141 191
f 48 175 171 191
f 102 167 148 185
f 64 135 81 163
f 30 178 144 179
f 57 180 54 185
f 121 52 55 179
f 40 175 165 188
f 60 91 170 181
f 130 51 173 180
f 76 183 168 184
f 74 75 72 184
f 140 19 27 174
f 165 180 171 191
f 49 174 168 177
f 83 74 168 186
f 84 121 55 181
f 69 70 78 166
f 120 167 54 180
f 121 167 55 181
f 148 121 84 167
f 74 128 168 184
f 131 166 69 183
f 156 170 176 192
f 74 75 184 186
f 67 68 69 166
f 65 67 66 184
f 117 164 150 196
f 96 156 154 170
f 98 150 164 196
f 145 170 58 181
f 161 133 158 185
f 24 18 5 169
f 78 183 166 193
f 34 32 165 188
f 98 117 150 196
f 41 35 125 165
f 35 34 32 165
f 73 156 170 176
f 74 184 168 186
f 72 184 75 186
f 133 161 132 186
f 119 102 148 185
f 150 164 117 195
f 34 188 165 189
f 57 54 102 185
f 148 102 120 167
f 170 181 163 182
f 12 178 171 194
f 6 18 21 169
f 5 169 6 188
f 136 62 81 181
f 171 180 167 190
f 58 170 91 181
f 78 166 70 193
f 2 168 49 174
f 34 140 28 188
f 40 125 32 165
f 38 48 141 191
f 169 174 165 188
f 40 175 48 191
f 113 130 51 173
f 66 72 135 184
f 169 187 21 196
f 131 166 183 184
f 74 128 83 168
f 18 21 169 187
f 7 6 21 196
f 64 65 66 182
f 69 166 131 184
f 56 112 172 177
f 76 168 128 184
f 166 182 68 192
f 159 95 168 183
f 34 165 35 189
f 96 170 126 178
f 172 183 89 193
f 150 118 98 164
f 172 177 106 183
f 168 183 166 184
f 111 167 120 180
f 17 5 6 188
f 30 171 179 190
f 130 37 51 180
f 40 165 32 188
f 100 182 170 192
f 112 106 172 177
f 74 76 128 184
f 170 179 167 181
f 32 125 35 165
f 41 165 125 191
f 113 173 57 185
f 2 168 174 197
f 18 6 5 169
f 167 180 111 190
f 126 170 145 179
f 12 144 30 178
f 91 126 154 170
f 69 68 70 166
f 4 27 19 174
f 137 92 173 186
f 25 73 178 198
f 29 171 141 194
f 60 58 91 181
f 120 167 111 190
f 111 120 54 180
f 150 118 164 198
f 30 33 29 171
f 69 131 67 184
f 95 49 159 168
f 68 182 127 192
f 117 187 164 196
f 166 176 170 192
f 33 171 30 190
f 57 51 54 180
f 137 92 113 173
f 22 23 56 187
f 37 36 35 189
f 29 38 141 171
f 150 164 195 198
f 180 190 171 191
f 145 91 58 170
f 41 180 165 191
f 82 187 172 195
f 137 173 185 186
f 113 137 173 185
f 112 106 177 183
f 164 187 117 195
f 154 170 156 192
f 148 120 121 167
f 165 188 174 189
f 76 138 168 183
f 116 96 126 178
f 76 138 128 168
f 82 56 172 187
f 172 176 166 193
f 9 98 118 164
f 30 29 12 171
f 70 166 192 193
f 33 38 29 171
f 165 175 169 188
f 96 73 156 170
f 141 175 16 194
f 96 25 73 178
f 45 175 40 188
f 98 164 9 196
f 121 94 52 190
f 106 56 112 172
f 70 68 77 166
f 70 192 124 193
f 169 175 164 196
f 31 45 40 188
f 172 177 169 187
f 106 89 172 183
f 113 149 130 173
f 149 189 173 197
f 121 167 94 190
f 171 175 165 191
f 93 110 193 195
f 172 193 110 195
f 125 40 47 191
f 3 49 14 177
f 96 154 126 170
f 78 90 183 193
f 152 26 174 189
f 113 57 137 185
f 16 194 175 196
f 60 62 181 182
f 110 82 172 195
f 92 149 113 173
f 146 23 24 177
f 34 28 32 188
f 163 185 173 186
f 72 75 132 186
f 91 145 126 170
f 38 171 33 191
f 124 192 104 193
f 18 22 21 187
f 77 166 68 192
f 130 36 37 189
f 30 179 43 190
f 131 183 76 184
f 89 110 172 193
f 121 120 94 167
f 9 164 118 194
f 173 174 168 197
f 12 171 29 194
f 56 23 112 177
f 116 1 96 178
f 17 27 5 188
f 140 27 28 188
f 104 192 176 193
f 120 94 167 190
f 112 177 107 183
f 82 56 110 172
f 169 164 187 196
f 164 175 171 194
f 118 194 164 198
f 3 174 49 177
f 9 164 194 196
f 70 166 77 192
f 167 180 173 185
f 146 24 14 177
f 30 44 43 179
f 101 154 156 192
f 152 174 140 189
f 166 182 163 184
f 92 149 173 197
f 171 190 33 191
f 60 63 62 182
f 45 129 175 188
f 163 181 167 185
f 150 195 99 198
f 167 179 171 190
f 143 193 176 195
f 164 176 172 195
f 171 178 164 194
f 164 178 176 198
f 9 194 16 196
f 129 17 6 188
f 78 89 90 193
f 10 118 15 194
f 10 15 178 194
f 150 99 80 198
f 83 186 168 197
f 111 190 180 191
f 77 68 127 192
f 127 68 65 182
f 118 15 194 198
f 10 15 11 178
f 19 3 4 174
f 9 16 8 196
f 45 31 129 188
f 30 144 44 179
f 149 153 189 197
f 40 32 31 188
f 159 168 138 183
f 124 123 104 192
f 89 183 90 193
f 104 176 143 193
f 62 63 64 182
f 137 185 133 186
f 178 194 15 198
f 47 40 48 191
f 89 110 106 172
f 75 83 87 186
f 123 176 104 192
f 105 49 2 168
f 107 177 95 183
f 87 186 83 197
f 173 186 92 197
f 73 122 176 198
f 159 138 95 183
f 149 153 50 189
f 112 107 106 183
f 137 133 88 186
f 120 111 94 190
f 110 56 106 172
f 93 193 143 195
f 170 182 166 192
f 173 180 165 189
f 10 178 11 194
f 114 190 111 191
f 52 53 55 179
f 173 189 174 197
f 139 174 26 189
f 19 140 152 174
f 116 145 53 179
f 36 34 35 189
f 100 127 182 192
f 116 126 145 179
f 134 78 90 183
f 112 23 146 177
f 89 93 110 193
f 115 110 93 195
f 63 60 100 182
f 159 128 138 168
f 11 144 12 178
f 46 140 34 189
f 168 184 163 186
f 16 175 151 196
f 106 90 89 183
f 76 131 109 183
f 41 42 37 180
f 2 3 26 174
f 143 176 86 195
f 70 77 124 192
f 137 88 92 186
f 16 151 8 196
f 73 122 156 176
f 3 2 49 174
f 150 80 118 198
f 155 117 187 195
f 160 143 104 176
f 84 55 59 181
f 25 61 73 198
f 33 30 43 190
f 166 183 172 193
f 105 168 2 197
f 107 85 95 177
f 150 117 99 195
f 79 78 70 193
f 141 16 20 194
f 16 141 48 175
f 111 180 147 191
f 45 129 151 175
f 86 195 176 198
f 156 122 123 176
f 122 86 176 198
f 11 178 12 194
f 29 141 20 194
f 92 108 149 197
f 156 176 123 192
f 82 22 56 187
f 160 104 123 176
f 95 85 49 177
f 48 40 45 175
f 84 59 136 181
f 51 37 42 180
f 105 159 49 168
f 13 187 117 196
f 43 179 52 190
f 139 174 189 197
f 134 109 131 183
f 160 86 143 176
f 96 1 25 178
f 114 111 147 191
f 59 55 58 181
f 19 26 3 174
f 99 195 86 198
f 110 115 82 195
f 155 187 82 195
f 60 62 59 181
f 71 74 72 184
f 25 178 15 198
f 158 103 119 185
f 152 140 46 189
f 175 194 164 196
f 43 44 52 179
f 164 172 187 195
f 59 62 136 181
f 100 157 127 192
f 93 104 143 193
f 15 25 11 178
f 59 58 60 181
f 73 61 122 198
f 99 86 80 198
f 54 51 42 180
f 102 119 103 185
f 67 71 66 184
f 71 72 66 184
f 39 33 190 191
f 64 63 65 182
f 152 139 26 189
f 160 123 122 176
f 39 38 33 191
f 132 88 133 186
f 21 187 13 196
f 92 186 87 197
f 124 79 70 193
f 2 26 139 174
f 76 109 138 183
f 114 39 33 190
f 38 47 48 191
f 117 13 142 187
f 142 155 117 187
f 153 152 46 189
f 25 1 11 178
f 41 147 42 180
f 42 111 54 180
f 98 13 117 196
f 122 61 86 198
f 105 83 128 168
f 176 192 166 193
f 107 112 85 177
f 122 86 160 176
f 41 147 180 191
f 76 74 71 184
f 13 21 142 187
f 2 174 139 197
f 131 71 67 184
f 85 146 14 177
f 142 82 155 187
f 36 130 50 189
f 97 87 83 197
f 87 92 88 186
f 41 125 47 191
f 42 147 111 180
f 83 168 105 197
f 155 99 117 195
f 155 82 115 195
f 45 151 16 175
f 57 102 103 185
f 49 85 14 177
f 158 133 103 185
f 39 190 114 191
f 44 144 116 179
f 115 93 143 195
f 8 98 9 196
f 1 144 11 178
f 63 100 127 182
f 168 186 173 197
f 146 85 112 177
f 139 189 153 197
f 108 153 149 197
f 52 44 53 179
f 32 28 31 188
f 44 116 53 179
f 108 92 87 197
f 21 22 142 187
f 176 195 164 198
f 90 106 107 183
f 10 11 12 194
f 138 107 95 183
f 82 142 22 187
f 28 27 17 188
f 7 21 13 196
f 131 76 71 184
f 100 101 157 192
f 176 193 172 195
f 20 16 9 194
f 152 26 19 174
f 88 132 75 186
f 164 194 178 198
f 129 31 17 188
f 57 103 137 185
f 127 65 63 182
f 159 105 128 168
f 139 152 153 189
f 138 109 107 183
f 137 103 133 185
f 79 89 78 193
f 94 43 52 190
f 16 48 45 175
f 25 15 61 198
f 36 46 34 189
f 12 29 20 194
f 75 87 88 186
f 77 127 157 192
f 9 118 10 194
f 111 114 94 190
f 97 83 105 197
f 99 143 86 195
f 80 15 118 198
f 8 151 7 196
f 124 104 79 193
f 134 90 109 183
f 39 114 147 191
f 33 43 114 190
f 79 93 89 193
f 77 123 124 192
f 36 50 46 189
f 79 104 93 193
f 31 28 17 188
f 97 105 139 197
f 61 80 86 198
f 139 105 2 197
f 109 90 107 183
f 8 7 13 196
f 143 99 115 195
f 98 8 13 196
f 47 38 39 191
f 41 47 147 191
f 153 46 50 189
f 157 101 123 192
f 94 114 43 190
f 115 99 155 195
f 157 123 77 192
f 12 20 10 194
f 156 123 101 192
f 61 15 80 198
f 20 9 10 194
f 47 39 147 191
f 97 108 87 197
f 139 153 108 197
f 139 108 97 197
f 181 158 81 136
f 181 81 158 163
f 69 183 78 134
f 78 183 69 166
f 185 161 186 163
f 185 186 161 133
f 180 35 189 37
f 180 189 35 165
f 179 121 190 167
f 179 190 121 52
f 177 3 4 14
f 177 4 3 174
f 55 181 53 58
f 55 53 181 179
f 145 53 181 58
f 145 181 53 179
f 100 192 91 101
f 100 91 192 170
f 154 91 192 101
f 154 192 91 170
f 196 129 151 7
f 196 151 129 175
f 189 149 130 50
f 189 130 149 173
f 116 178 144 1
f 116 144 178 179

View File

@@ -0,0 +1,278 @@
v 0 1 0
v 1 1 0
v 2 1 0
v 3 1 0
v 4 1 0
v 5 1 0
v 6 1 0
v 7 1 0
v 8 1 0
v 9 1 0
v 10 1 0
v 0 0 0
v 1 0 0
v 2 0 0
v 3 0 0
v 4 0 0
v 5 0 0
v 6 0 0
v 7 0 0
v 8 0 0
v 9 0 0
v 10 0 0
v 0 1 2
v 1 1 2
v 2 1 2
v 3 1 2
v 4 1 2
v 5 1 2
v 6 1 2
v 7 1 2
v 8 1 2
v 9 1 2
v 10 1 2
v 0 2 2
v 1 2 2
v 2 2 2
v 3 2 2
v 4 2 2
v 5 2 2
v 6 2 2
v 7 2 2
v 8 2 2
v 9 2 2
v 10 2 2
v 1 0 1
v 2 0 1
v 3 0 1
v 4 0 1
v 5 0 1
v 6 0 1
v 7 0 1
v 8 0 1
v 9 0 1
v 10 0 1
v 0 0 2
v 1 0 2
v 2 0 2
v 3 0 2
v 4 0 2
v 5 0 2
v 6 0 2
v 7 0 2
v 8 0 2
v 9 0 2
v 10 0 2
v 10 1 1
v 10 2 0
v 9 2 0
v 8 2 0
v 7 2 0
v 6 2 0
v 5 2 0
v 4 2 0
v 3 2 0
v 2 2 0
v 1 2 0
v 0 2 0
v 10 2 1
v 9 2 1
v 8 2 1
v 7 2 1
v 6 2 1
v 5 2 1
v 4 2 1
v 3 2 1
v 2 2 1
v 1 2 1
v 0 2 1
v 0 1 1
v 0 0 1
f 36 86 25 24
f 1 87 88 89
f 12 13 1 45
f 85 38 37 26
f 87 35 23 24
f 48 85 4 47
f 61 28 49 60
f 46 14 2 3
f 9 31 8 52
f 85 37 25 26
f 19 7 18 51
f 61 28 50 49
f 49 28 27 60
f 85 5 4 73
f 3 85 4 74
f 28 61 50 29
f 32 31 43 79
f 23 45 90 89
f 28 39 40 82
f 10 54 53 21
f 48 85 47 26
f 48 85 5 4
f 7 50 18 51
f 50 28 29 6
f 85 4 74 73
f 88 87 23 89
f 77 1 76 87
f 13 46 14 2
f 1 12 45 90
f 87 35 34 23
f 1 2 87 89
f 9 53 20 21
f 29 28 82 6
f 29 82 7 6
f 48 49 16 5
f 25 47 58 46
f 15 48 16 4
f 16 48 5 4
f 2 1 45 89
f 1 2 76 87
f 8 30 7 51
f 35 86 36 24
f 45 1 90 89
f 76 2 86 87
f 80 30 41 81
f 85 5 73 84
f 30 62 29 51
f 50 29 7 6
f 23 24 45 89
f 77 1 87 88
f 76 2 75 86
f 48 85 26 27
f 2 86 87 24
f 3 74 75 86
f 3 85 74 86
f 3 24 86 2
f 2 3 75 86
f 23 87 24 89
f 30 80 41 42
f 28 5 27 83
f 30 29 41 81
f 47 58 59 26
f 15 48 4 47
f 88 87 34 23
f 87 86 35 24
f 13 2 1 45
f 55 23 45 90
f 86 85 36 25
f 13 46 2 45
f 46 25 24 57
f 5 73 84 72
f 36 85 37 25
f 3 85 86 25
f 25 3 24 86
f 85 84 38 27
f 10 11 66 54
f 59 48 26 27
f 48 47 59 26
f 46 25 57 58
f 23 56 55 45
f 85 48 5 27
f 85 38 26 27
f 23 24 56 45
f 46 24 2 45
f 3 25 24 46
f 3 24 2 46
f 24 46 57 45
f 24 57 56 45
f 87 2 24 89
f 24 2 45 89
f 47 25 3 46
f 3 15 4 47
f 10 11 54 22
f 14 15 3 47
f 46 14 3 47
f 49 28 5 27
f 48 49 5 27
f 47 25 58 26
f 85 5 84 27
f 47 85 3 26
f 9 80 31 79
f 47 25 26 3
f 3 85 25 26
f 85 47 3 4
f 48 49 27 60
f 59 48 27 60
f 31 9 53 52
f 50 61 62 29
f 33 32 78 66
f 31 80 30 42
f 40 29 82 81
f 63 62 30 51
f 30 63 51 52
f 41 29 40 81
f 33 32 66 54
f 9 80 68 69
f 82 71 70 7
f 5 84 83 72
f 32 43 78 79
f 29 28 40 82
f 5 84 27 83
f 33 32 44 78
f 9 80 79 68
f 8 7 19 51
f 28 27 39 83
f 28 39 82 83
f 28 50 49 6
f 65 64 32 53
f 82 71 7 6
f 29 82 81 7
f 81 70 8 7
f 84 38 27 83
f 27 38 39 83
f 32 43 44 78
f 9 31 32 79
f 31 80 42 79
f 31 42 43 79
f 67 78 10 11
f 32 9 10 53
f 62 50 29 51
f 71 82 83 6
f 49 16 5 17
f 81 82 70 7
f 50 49 6 17
f 79 68 67 10
f 79 9 68 10
f 50 6 18 17
f 78 10 11 66
f 78 79 67 10
f 32 9 79 10
f 78 32 79 66
f 79 32 10 66
f 78 79 10 66
f 9 10 53 21
f 82 28 83 6
f 71 83 72 6
f 69 81 70 8
f 64 31 53 52
f 28 5 83 6
f 83 5 72 6
f 80 30 81 8
f 28 49 5 6
f 64 63 31 52
f 80 81 69 8
f 9 80 69 8
f 7 50 6 18
f 80 9 31 8
f 80 31 30 8
f 31 63 30 52
f 31 30 8 52
f 30 81 8 7
f 30 29 81 7
f 19 8 51 52
f 33 65 32 54
f 49 5 6 17
f 10 54 21 22
f 29 50 7 51
f 30 29 7 51
f 65 32 54 53
f 9 53 52 20
f 31 9 32 53
f 64 31 32 53
f 32 10 66 53
f 66 10 54 53
f 32 66 54 53
f 8 30 51 52
f 8 9 52 20
f 19 8 52 20

File diff suppressed because it is too large

View File

@@ -0,0 +1,387 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [],
"source": [
"import numpy as np\n",
"import torch\n",
"import igl\n",
"\n",
"import meshplot as mp\n",
"import sys as _sys\n",
"_sys.path.append(\"../src\")\n",
"from elasticenergy import *\n",
"from elasticsolid import *\n",
"from adjoint_sensitivity import *\n",
"from vis_utils import *\n",
"from objectives import *\n",
"from harmonic_interpolator import *\n",
"from shape_optimizer import *\n",
"\n",
"from utils import *\n",
"\n",
"shadingOptions = {\n",
" \"flat\":True,\n",
" \"wireframe\":False, \n",
"}\n",
"\n",
"rot = np.array(\n",
" [[1, 0, 0 ],\n",
" [0, 0, 1],\n",
" [0, -1, 0 ]]\n",
")\n",
"\n",
"torch.set_default_dtype(torch.float64)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Create the deformed object\n",
"\n",
"## Load the mesh"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {
"scrolled": false
},
"outputs": [
{
"data": {
"application/vnd.jupyter.widget-view+json": {
"model_id": "9ae930440a43419c88fd82d71d7a6fa4",
"version_major": 2,
"version_minor": 0
},
"text/plain": [
"Renderer(camera=PerspectiveCamera(children=(DirectionalLight(color='white', intensity=0.6, position=(-1.987469…"
]
},
"metadata": {},
"output_type": "display_data"
},
{
"data": {
"text/plain": [
"<meshplot.Viewer.Viewer at 0x7f86840382e0>"
]
},
"execution_count": 2,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"vNP, _, _, tNP, _, _ = igl.read_obj(\"../data/dinosaur.obj\")\n",
"# vNP, _, _, tNP, _, _ = igl.read_obj(\"../data/beam.obj\")\n",
"\n",
"aabb = np.max(vNP, axis=0) - np.min(vNP, axis=0)\n",
"length_scale = np.mean(aabb)\n",
"\n",
"\n",
"v, t = torch.tensor(vNP), torch.tensor(tNP)\n",
"eNP = igl.edges(tNP)\n",
"beNP = igl.edges(igl.boundary_facets(tNP))\n",
"\n",
"bvNP, ivNP = get_boundary_and_interior(v.shape[0], tNP)\n",
"\n",
"mp.plot(vNP @ rot.T, np.array(tNP), shading=shadingOptions)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Add some physical characteristics"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Pinned vertices: [ 46 47 50 59 60 62 64 65 88 89 91 98 99 102 103 104]\n"
]
}
],
"source": [
"rho = 131 # [kg.m-3], if aabb[0] ~ 14m, and m_tot = 6000kg\n",
"young = 3e8 # [Pa] \n",
"poisson = 0.2\n",
"\n",
"# Find some of the lowest vertices and pin them\n",
"minZ = torch.min(v[:, 2])\n",
"pin_idx = torch.arange(v.shape[0])[v[:, 2] < minZ + 0.01*aabb[2]]\n",
"vIdx = np.arange(v.shape[0])\n",
"pin_idx = vIdx[np.in1d(vIdx, bvNP) & np.in1d(vIdx, pin_idx)]\n",
"print(\"Pinned vertices: {}\".format(pin_idx))"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Initial guess\n",
"\n",
"The idea is to first deform the mesh under inverted gravity: the resulting deformed shape then serves as the initial guess for the rest shape, since under real gravity it should approximately sag back towards the target."
]
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {
"scrolled": false
},
"outputs": [
{
"data": {
"application/vnd.jupyter.widget-view+json": {
"model_id": "1ee888e94cd641f4bbbce4b051acccfb",
"version_major": 2,
"version_minor": 0
},
"text/plain": [
"Renderer(camera=PerspectiveCamera(children=(DirectionalLight(color='white', intensity=0.6, position=(-2.468079…"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"# Inverted gravity\n",
"force_mass = torch.zeros(size=(3,))\n",
"force_mass[2] = + rho * 9.81\n",
"\n",
"# Gravity going in the wrong direction\n",
"\n",
"ee = NeoHookeanElasticEnergy(young, poisson)\n",
"\n",
"v = HarmonicInterpolator(v, t, ivNP).interpolate(v[bvNP])\n",
"solid_init = ElasticSolid(v, t, ee, rho=rho, pin_idx=pin_idx, f_mass=force_mass)\n",
"\n",
"solid_init.find_equilibrium()\n",
"plot_torch_solid(solid_init, beNP, rot, length_scale)\n",
"\n",
"# Use these as initial guesses\n",
"v_init_rest = solid_init.v_def.clone().detach()\n",
"v_init_def = solid_init.v_rest.clone().detach()\n",
"\n",
"# v_init_rest = solid_init.v_rest.clone().detach()\n",
"# v_init_def = solid_init.v_def.clone().detach()"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Inverse design\n"
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {
"scrolled": false
},
"outputs": [
{
"ename": "TypeError",
"evalue": "unsupported format string passed to NoneType.__format__",
"output_type": "error",
"traceback": [
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
"\u001b[0;31mTypeError\u001b[0m Traceback (most recent call last)",
"\u001b[0;32m/tmp/ipykernel_46/1868162750.py\u001b[0m in \u001b[0;36m<module>\u001b[0;34m\u001b[0m\n\u001b[1;32m 14\u001b[0m \u001b[0msolid_\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mupdate_def_shape\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mv_init_def\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 15\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 16\u001b[0;31m \u001b[0moptimizer\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mShapeOptimizer\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0msolid_\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mvt_surf\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mweight_reg\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m0.\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 17\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 18\u001b[0m \u001b[0mv_eq_init\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0moptimizer\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msolid\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mv_def\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mclone\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdetach\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;31m#bookkeeping\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;32m/opt/notebooks/assignment_2_4/notebook/../src/shape_optimizer.py\u001b[0m in \u001b[0;36m__init__\u001b[0;34m(self, solid, vt_surf, weight_reg)\u001b[0m\n\u001b[1;32m 48\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msolid\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mfind_equilibrium\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 49\u001b[0m \u001b[0mobj_init\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtgt_fit\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mobj\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0msolid\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mv_def\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mclone\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdetach\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 50\u001b[0;31m \u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\"Initial objective: {:.4e}\\n\"\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mformat\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mobj_init\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 51\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 52\u001b[0m \u001b[0;31m# Initialize grad\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;31mTypeError\u001b[0m: unsupported format string passed to NoneType.__format__"
]
}
],
"source": [
"force_mass = torch.zeros(size=(3,))\n",
"force_mass[2] = - rho * 9.81\n",
"use_linear = False\n",
"\n",
"# The target is the initial raw mesh\n",
"vt_surf = torch.tensor(vNP[bvNP, :])\n",
"\n",
"# Create solid\n",
"if use_linear:\n",
" ee = LinearElasticEnergy(young, poisson)\n",
"else:\n",
" ee = NeoHookeanElasticEnergy(young, poisson)\n",
"solid_ = ElasticSolid(v_init_rest, t, ee, rho=rho, pin_idx=pin_idx, f_mass=force_mass)\n",
"solid_.update_def_shape(v_init_def)\n",
"\n",
"optimizer = ShapeOptimizer(solid_, vt_surf, weight_reg=0.)\n",
"\n",
"v_eq_init = optimizer.solid.v_def.clone().detach() #bookkeeping"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"optimizer.optimize(step_size_init=1e-4, max_l_iter=10, n_optim_steps=40)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import matplotlib.pyplot as plt\n",
"\n",
"plt.figure(figsize=(10, 6))\n",
"plt.plot(to_numpy(optimizer.objectives[optimizer.objectives > 0]))\n",
"plt.title(\"Objective as optimization goes\", fontsize=14)\n",
"plt.xlabel(\"Optimization steps\", fontsize=12)\n",
"plt.ylabel(\"Objective\", fontsize=12)\n",
"plt.grid()\n",
"plt.show()"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Green (Initial guess for rest state) deploys to Black\n",
"\n",
"Blue (Optimized rest state) deploys to Yellow\n",
"\n",
"Red is the Target Shape\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"p = mp.plot(np.array(optimizer.solid.v_def) @ rot.T, tNP, shading=shadingOptions)\n",
"# p.add_points(np.array(optimizer.solid.v_def)[pin_idx, :] @ rot.T, shading={\"point_color\":\"black\", \"point_size\": 0.2})\n",
"p.add_edges(np.array(v_init_rest) @ rot.T, beNP, shading={\"line_color\": \"green\"})\n",
"p.add_edges(vNP @ rot.T, beNP, shading={\"line_color\": \"red\"})\n",
"p.add_edges(np.array(v_eq_init) @ rot.T, beNP, shading={\"line_color\": \"black\"})\n",
"p.add_edges(np.array(optimizer.solid.v_rest) @ rot.T, beNP, shading={\"line_color\": \"blue\"})\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"v_rest_optim_g = optimizer.solid.v_rest.clone().detach() #bookkeeping"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Add point load to the right most vertices\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"maxX = torch.max(v[:, 0])\n",
"f_point_idx = torch.arange(v.shape[0])[v[:, 0] > maxX - 0.01*aabb[0]]\n",
"\n",
"f_point = torch.zeros(size=(f_point_idx.shape[0], 3))\n",
"f_point[:, 2] = -5e3\n",
"\n",
"optimizer.solid.add_point_load(f_point_idx, f_point)\n",
"optimizer.set_params(optimizer.params)\n",
"v_def_optim_g_under_point = optimizer.solid.v_def.clone().detach() #bookkeeping"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"optimizer.reset_BFGS()\n",
"optimizer.optimize(step_size_init=1e-4, max_l_iter=10, n_optim_steps=20)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Green (Optimum rest state under gravity) deploys to Black with the additional point load\n",
"\n",
"Blue (Optimized rest state) deploys to Yellow\n",
"\n",
"Red is the Target Shape\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"p = mp.plot(np.array(optimizer.solid.v_def) @ rot.T, tNP, shading=shadingOptions)\n",
"# p.add_points(np.array(optimizer.solid.v_def)[pin_idx, :] @ rot.T, shading={\"point_color\":\"black\", \"point_size\": 0.2})\n",
"p.add_edges(np.array(v_rest_optim_g) @ rot.T, beNP, shading={\"line_color\": \"green\"})\n",
"p.add_edges(vNP @ rot.T, beNP, shading={\"line_color\": \"red\"})\n",
"p.add_edges(np.array(v_def_optim_g_under_point) @ rot.T, beNP, shading={\"line_color\": \"black\"})\n",
"p.add_edges(np.array(optimizer.solid.v_rest) @ rot.T, beNP, shading={\"line_color\": \"blue\"})\n"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.9.7"
},
"toc": {
"base_numbering": 1,
"nav_menu": {},
"number_sections": true,
"sideBar": true,
"skip_h1_title": false,
"title_cell": "Table of Contents",
"title_sidebar": "Contents",
"toc_cell": false,
"toc_position": {},
"toc_section_display": true,
"toc_window_display": false
}
},
"nbformat": 4,
"nbformat_minor": 4
}
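
The notebook above walks through an inverse elastic design problem. As a reading aid (our reconstruction from the notebook and the source files further down, so treat the notation as a sketch rather than the author's): the design variables are the rest vertex positions X, the state x(X) is the static equilibrium under gravity computed by find_equilibrium, and the objective compares the deformed boundary vertices to the target surface vt_surf,

$$
\min_{X}\; J(X) = \tfrac{1}{2}\,\big\lVert x_b(X) - \bar{x}_b \big\rVert^2
\qquad \text{s.t.} \qquad
f_{\mathrm{el}}\big(X, x(X)\big) + f_{\mathrm{ext}} = 0 .
$$

The gradient of J with respect to X is presumably obtained with the adjoint solve defined in adjoint_sensitivity.py below, driven by the shape_optimizer.py file whose diff is suppressed here.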

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,31 @@
import torch
from utils import linear_solve
torch.set_default_dtype(torch.float64)
def compute_adjoint(solid, dJdx_U):
'''
This assumes that the solid is at equilibrium when called.
Input:
- solid : an elastic solid at equilibrium
- dJdx_U : array of shape (3*#unpinned,)
Output:
- adjoint : array of shape (3*#v,)
'''
y = torch.zeros_like(solid.v_rest)
dx0s = torch.zeros_like(solid.v_rest)
def LHS(dx):
'''
Should implement the Hessian-Vector product (taking into account pinning constraints) as described in the handout.
'''
dx0s[solid.free_idx] = dx.reshape(-1,3)
df0s = - solid.compute_force_differentials(dx0s)
return df0s[solid.free_idx].reshape(-1,)
RHS = dJdx_U.T.reshape(-1,)
y[solid.free_idx] = linear_solve(LHS, RHS).reshape(-1,3)
return y.reshape(-1,)
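
For context (a hedged note; the shape_optimizer.py file that consumes this helper is suppressed elsewhere in this diff): this is the standard adjoint solve for an equilibrium-constrained objective. With the stiffness K = -\partial f/\partial x restricted to the unpinned degrees of freedom, the function solves

$$
K_{\mathrm{ff}}\, y_{\mathrm{f}} = \Big(\frac{\partial J}{\partial x}\Big)_{\mathrm{f}},
\qquad y_{\text{pinned}} = 0,
$$

which is exactly what LHS and RHS encode: LHS applies K through compute_force_differentials with pinned rows and columns masked out, and linear_solve inverts it against dJdx_U. The shape gradient would then follow as dJ/dX = \partial J/\partial X + y^\top \partial f/\partial X, assembled wherever the optimizer consumes this adjoint.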

View File

@@ -0,0 +1,191 @@
import numpy as np
import torch
torch.set_default_dtype(torch.float64)
class ElasticEnergy:
def __init__(self, young, poisson):
'''
Input:
- young : Young's modulus [Pa]
- poisson : Poisson ratio
'''
self.young = young
self.poisson = poisson
self.lbda = young * poisson / ((1 + poisson) * (1 - 2 * poisson))
self.mu = young / (2 * (1 + poisson))
self.E = None
self.dE = None
self.psi = None
self.P = None
self.dP = None
def make_strain_tensor(self, jac):
'''
This method computes the strain tensor (#t, 3, 3), and stores it in self.E
Input:
- jac : jacobian of the deformation (#t, 3, 3)
'''
print("Please specify the kind of elasticity model.")
raise NotImplementedError
def make_differential_strain_tensor(self, jac, dJac):
'''
This method computes the differential of strain tensor (#t, 3, 3),
and stores it in self.dE
Input:
- jac : jacobian of the deformation (#t, 3, 3)
- dJac : differential of the jacobian of the deformation (#t, 3, 3)
'''
print("Please specify the kind of elasticity model.")
raise NotImplementedError
def make_energy_density(self, jac):
'''
This method computes the energy density at each tetrahedron (#t,),
and stores the result in self.psi
Input:
- jac : jacobian of the deformation (#t, 3, 3)
'''
print("Please specify the kind of elasticity model.")
raise NotImplementedError
def make_piola_kirchhoff_stress_tensor(self, jac):
'''
This method computes the stress tensor (#t, 3, 3), and stores it in self.P
Input:
- jac : jacobian of the deformation (#t, 3, 3)
'''
print("Please specify the kind of elasticity model.")
raise NotImplementedError
def make_differential_piola_kirchhoff_stress_tensor(self, jac, dJac):
'''
This method computes the differential of the stress tensor (#t, 3, 3), and stores it in self.dP
Input:
- jac : jacobian of the deformation (#t, 3, 3)
- dJac : differential of the jacobian of the deformation (#t, 3, 3)
'''
print("Please specify the kind of elasticity model.")
raise NotImplementedError
class LinearElasticEnergy(ElasticEnergy):
def __init__(self, young, poisson):
super().__init__(young, poisson)
def make_energy_density(self, jac):
# First, update the strain tensor
self.make_strain_tensor(jac)
# psi = mu*E:E + lbda/2*Tr(E)^2
self.psi = (self.mu * torch.einsum('mij,mij->m', self.E, self.E) +
self.lbda/2 * torch.einsum('mii->m', self.E) ** 2)
pass
def make_strain_tensor(self, jac):
eye = torch.zeros((len(jac), 3, 3))
for i in range(3):
eye[:, i, i] = 1
# E = 1/2*(F + F^T) - I
self.E = 0.5*(torch.swapaxes(jac, 1, 2) + jac) - eye
pass
def make_piola_kirchhoff_stress_tensor(self, jac):
# First, update the strain tensor
self.make_strain_tensor(jac)
tr = torch.einsum('ijj->i', self.E)
eye = torch.zeros((len(self.E), 3, 3))
for i in range(3):
eye[:, i, i] = tr
# P = 2*mu*E + lbda*tr(E)*I
self.P = 2 * self.mu * self.E + self.lbda * eye
pass
def make_differential_strain_tensor(self, jac, dJac):
# dE = 1/2*(dF + dF^T)
self.dE = 0.5 * (dJac + torch.swapaxes(dJac, 1, 2))
pass
def make_differential_piola_kirchhoff_stress_tensor(self, jac, dJac):
# First, update the differential of the strain tensor,
# and the strain tensor
self.make_strain_tensor(jac)
self.make_differential_strain_tensor(jac, dJac)
# Diagonal matrix
dtr = torch.einsum('ijj->i', self.dE)
dI = torch.zeros((len(jac), 3, 3))
for i in range(3):
dI[:, i, i] = dtr
# dP = 2*mu*dE + lbda*tr(dE)*I
self.dP = 2 * self.mu * self.dE + self.lbda * dI
pass
class NeoHookeanElasticEnergy(ElasticEnergy):
def __init__(self, young, poisson):
super().__init__(young, poisson)
self.logJ = None
self.Finv = None
def make_energy_density(self, jac):
# First, update the strain tensor
self.make_strain_tensor(jac)
# J = det(F)
# I1 = Tr(F^T.F)
# psi = mu/2*(I1 - 3 - 2*log(J)) + lbda/2*log(J)^2
logJ = torch.log(torch.linalg.det(jac))
I1 = torch.einsum('mji,mji->m', jac, jac)
self.psi = self.mu/2 * (I1 - 3 - 2*logJ) + self.lbda/2 * logJ**2
pass
def make_strain_tensor(self, jac):
pass
def make_piola_kirchhoff_stress_tensor(self, jac):
self.logJ = torch.log(torch.linalg.det(jac))
# First invert, then transpose
self.Finv = torch.linalg.inv(jac)
FinvT = torch.swapaxes(self.Finv, 1, 2)
# P = mu*(F - F^{-T}) + lbda*log(J)*F^{-T}
self.P = (self.mu * (jac - FinvT) + self.lbda * torch.einsum('i,ijk->ijk', self.logJ, FinvT))
pass
def make_differential_strain_tensor(self, jac, dJac):
pass
def make_differential_piola_kirchhoff_stress_tensor(self, jac, dJac):
# To be reused below
logJ = self.logJ.reshape(-1, 1, 1) # (#t, 1, 1) for shape broadcasting
FinvT = torch.swapaxes(self.Finv, 1, 2)
Fprod = torch.einsum("mij, mjk, mkl -> mil", FinvT, torch.swapaxes(dJac, 1, 2), FinvT)
trFinvdF = torch.einsum("mij, mji -> m", self.Finv, dJac)
# dP = mu*dF + (mu-lbda*log(J))*F^{-T}.dF^T.F^{-T} + lbda*tr(F^{-1}.dF)*F^{-T}
self.dP = (self.mu * dJac +
(self.mu - self.lbda * logJ) * Fprod +
self.lbda * torch.einsum("m, mij -> mij", trFinvdF, FinvT)
)
pass
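
The analytic differentials above are easy to sanity-check: the dP produced by make_differential_piola_kirchhoff_stress_tensor should agree with a finite difference of P along the same perturbation dF. The snippet below is our own verification sketch, not part of the repository; it assumes elasticenergy.py is importable (e.g. with src/ on the Python path) and uses arbitrary material parameters.

import torch
from elasticenergy import NeoHookeanElasticEnergy

torch.set_default_dtype(torch.float64)
torch.manual_seed(0)

ee = NeoHookeanElasticEnergy(young=3e8, poisson=0.2)

# Batch of Jacobians close to the identity so that det(F) stays positive
F = torch.eye(3) + 1e-2 * torch.randn(10, 3, 3)
dF = torch.randn(10, 3, 3)
eps = 1e-6

# Analytic differential: P must be computed first, since the differential
# reuses the cached self.logJ and self.Finv
ee.make_piola_kirchhoff_stress_tensor(F)
ee.make_differential_piola_kirchhoff_stress_tensor(F, dF)
dP_analytic = ee.dP.clone()

# Central finite difference of P along dF
ee.make_piola_kirchhoff_stress_tensor(F + eps * dF)
P_plus = ee.P.clone()
ee.make_piola_kirchhoff_stress_tensor(F - eps * dF)
P_minus = ee.P.clone()
dP_fd = (P_plus - P_minus) / (2 * eps)

print("max |dP - dP_fd| =", torch.max(torch.abs(dP_analytic - dP_fd)).item())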

View File

@@ -0,0 +1,458 @@
import numpy as np
import torch
import igl
from utils import *
torch.set_default_dtype(torch.float64)
def to_numpy(tensor):
return tensor.detach().clone().numpy()
# -----------------------------------------------------------------------------
# -----------------------------------------------------------------------------
# ELASTIC SOLID CLASS (using PyTorch)
# -----------------------------------------------------------------------------
# -----------------------------------------------------------------------------
class ElasticSolid(object):
def __init__(self, v_rest, t, ee, rho=1, pin_idx=[], f_mass=None):
'''
Input:
- v_rest : position of the vertices of the mesh (#v, 3)
- t : indices of the element's vertices (#t, 4)
- ee : elastic energy object that can be found in elasticenergy.py
- rho : mass per unit volume [kg.m-3]
- pin_idx : list or torch tensor of vertex indices to pin
- f_mass : external force per unit mass (3,) [N.kg-1]
'''
self.v_rest = v_rest
self.v_def = v_rest
self.t = t
self.ee = ee
self.rho = rho
if not isinstance(pin_idx, torch.Tensor):
pin_idx = torch.tensor(pin_idx)
self.pin_idx = pin_idx
self.f_mass = f_mass
self.free_idx = None
self.pin_mask = None
self.W0 = None
self.Dm = None
self.Bm = None
self.rest_barycenters = None
self.W = None
self.Ds = None
self.F = None
self.def_barycenters = None
self.energy_el = None
self.energy_ext = None
self.f = None
self.f_vol = None
self.f_ext = None
self.f_point = torch.zeros_like(v_rest)
self.dF = None
self.make_free_indices_and_pin_mask()
self.update_rest_shape(self.v_rest)
self.update_def_shape(self.v_def)
## Utils ##
def vertex_tet_sum(self, data):
'''
Distributes data specified at each tetrahedron to the neighboring vertices.
All neighboring vertices will receive the value indicated at the corresponding tet position in data.
Input:
- data : torch array of shape (#t,)
Output:
- data_sum : torch array of shape (#v,), containing the summed data
'''
i = self.t.T.flatten() # (4*#t,)
j = torch.arange(self.t.shape[0]) # (#t,)
j = torch.tile(j, (4,)) # (4*#t,)
# Has shape (#v, #t), a bit less efficient than using sparse matrices
m = torch.zeros(size=(self.v_rest.shape[0], self.t.shape[0]), dtype=torch.float64)
m[i, j] = data
return torch.sum(m, dim=1)
## Precomputation ##
def make_free_indices_and_pin_mask(self):
'''
Should list all the free indices and the pin mask.
Updated attributes:
- free_idx : torch tensor of shape (#free_vertices,) containing the list of unpinned vertices
- pin_mask : torch tensor of shape (#v, 1) containing 1 at free vertex indices and 0 at pinned vertex indices
'''
vi = torch.arange(self.v_rest.shape[0])
pin_filter = ~torch.isin(vi, self.pin_idx)
self.free_idx = vi[pin_filter]
self.pin_mask = torch.tensor([idx not in self.pin_idx
for idx in range(self.v_rest.shape[0])]).reshape(-1, 1)
## Methods related to rest quantities ##
def make_rest_barycenters(self):
'''
Construct the barycenters of the undeformed configuration
Updated attributes:
- rest_barycenters : torch tensor of shape (#t, 3) containing the position of each tet's barycenter
'''
self.rest_barycenters = torch.einsum('ijk -> ik', self.v_rest[self.t]) / 4
def make_rest_shape_matrices(self):
'''
Construct the rest shape matrix Dm of shape (#t, 3, 3), and its inverse Bm
Updated attributes:
- Dm : torch tensor of shape (#t, 3, 3) containing the shape matrix of each tet
- Bm : torch tensor of shape (#t, 3, 3) containing the inverse shape matrix of each tet
'''
e1 = (self.v_rest[self.t[:, 0]] - self.v_rest[self.t[:, 3]])
e2 = (self.v_rest[self.t[:, 1]] - self.v_rest[self.t[:, 3]])
e3 = (self.v_rest[self.t[:, 2]] - self.v_rest[self.t[:, 3]])
Ds = torch.stack((e1, e2, e3), dim=2)
self.Dm = Ds
self.Bm = torch.linalg.inv(self.Dm)
def update_rest_shape(self, v_rest):
'''
Updates the vertex position, the shape matrices Dm and Bm, the volumes W0,
and the mass matrix at rest
Input:
- v_rest : position of the vertices of the mesh at rest state (#v, 3)
Updated attributes:
- v_rest : torch tensor of shape (#v, 3) containing the position of each vertex at rest
- W0 : torch tensor of shape (#t,) containing the signed volume of each tet
'''
self.v_rest = v_rest
self.make_rest_barycenters()
self.make_rest_shape_matrices()
self.W0 = - torch.linalg.det(self.Dm) / 6
self.make_volumetric_and_external_forces()
self.update_def_shape(self.v_def)
## Methods related to deformed quantities ##
def make_def_barycenters(self):
'''
Construct the barycenters of the deformed configuration
Updated attributes:
- def_barycenters : torch tensor of shape (#t, 3) containing the position of each tet's barycenter
'''
self.def_barycenters = torch.einsum('ijk -> ik', self.v_def[self.t]) / 4
def make_def_shape_matrices(self):
'''
Construct Ds that has shape (#t, 3, 3)
Updated attributes:
- Ds : torch tensor of shape (#t, 3, 3) containing the shape matrix of each tet
'''
e1 = (self.v_def[self.t[:, 0]] - self.v_def[self.t[:, 3]])
e2 = (self.v_def[self.t[:, 1]] - self.v_def[self.t[:, 3]])
e3 = (self.v_def[self.t[:, 2]] - self.v_def[self.t[:, 3]])
Ds = torch.stack((e1, e2, e3), dim=2)
self.Ds = Ds
def make_jacobians(self):
'''
Compute the current Jacobian of the deformation
Updated attributes:
- F : torch tensor of shape (#t, 3, 3) containing Jacobian of the deformation in each tet
'''
self.F = torch.einsum('lij,ljk->lik', self.Ds, self.Bm)
def update_def_shape(self, v_def):
'''
Updates the vertex position, the Jacobian of the deformation, and the
resulting elastic forces.
Input:
- v_def : position of the vertices of the mesh (#v, 3)
Updated attributes:
- v_def : torch tensor of shape (#v, 3) containing the position of each vertex after deforming the solid
- W : torch tensor of shape (#t,) containing the signed volume of each tet
'''
# Can only change the unpinned ones
self.v_def = (~self.pin_mask) * self.v_rest + self.pin_mask * v_def
self.make_def_barycenters()
self.make_def_shape_matrices()
self.make_jacobians()
self.W = - torch.linalg.det(self.Ds) / 6
self.make_elastic_forces()
self.make_elastic_energy()
self.make_external_energy()
def displace(self, v_disp):
'''
Displace the whole mesh so that v_def += v_disp
Input:
- v_disp : displacement of the vertices of the mesh (#v, 3)
'''
self.update_def_shape(self.v_def + v_disp)
## Energies ##
def make_elastic_energy(self):
'''
This updates the elastic energy
Updated attributes:
- energy_el : elastic energy of the system [J]
'''
self.ee.make_strain_tensor(self.F)
self.ee.make_energy_density(self.F)
self.energy_el = torch.sum(self.W0 * self.ee.psi)
def make_external_energy(self):
'''
This computes the external energy potential
Updated attributes:
- energy_ext : potential energy due to external forces [J]
'''
self.energy_ext = - torch.einsum('i, ij, ij->', self.W0, self.f_vol, self.def_barycenters - self.rest_barycenters)
# TODO: ADD F_POINT ENERGY
## Forces ##
def make_elastic_forces(self):
'''
This method updates the elastic forces stored in self.f (#v, 3)
Updated attributes:
- f : elastic forces per vertex (#v, 3)
- ee : elastic energy, some attributes should be updated
'''
# First update strain/stress tensor, stored in self.ee
self.ee.make_strain_tensor(self.F)
self.ee.make_piola_kirchhoff_stress_tensor(self.F)
# H[el] = - W0[el]*P.Bm[el]^T
H = torch.einsum('lij,ljk->lik', self.ee.P, torch.swapaxes(self.Bm, 1, 2))
H = - torch.einsum('i,ijk->ijk', self.W0, H)
# Extract forces from H of shape (#t, 3, 3)
# We look at each component separately, then stack them in a vector of shape (4*#t,)
# Then we distribute the contributions to each vertex
fx = self.vertex_tet_sum(torch.hstack((H[:, 0, 0], H[:, 0, 1], H[:, 0, 2],
-H[:, 0, 0] - H[:, 0, 1] - H[:, 0, 2])))
fy = self.vertex_tet_sum(torch.hstack((H[:, 1, 0], H[:, 1, 1], H[:, 1, 2],
-H[:, 1, 0] - H[:, 1, 1] - H[:, 1, 2])))
fz = self.vertex_tet_sum(torch.hstack((H[:, 2, 0], H[:, 2, 1], H[:, 2, 2],
-H[:, 2, 0] - H[:, 2, 1] - H[:, 2, 2])))
# We stack them in a tensor of shape (#v, 3)
self.f = torch.column_stack((fx, fy, fz))
def add_point_load(self, v_idx, f_point):
'''
Input:
- v_idx : tensor of size (n_idx,) containing the list of vertex ids
- f_point : tensor of size (n_idx, 3) containing force to add at each vertex
Updated attributes:
- f_point : tensor of size (#v, 3)
'''
self.f_point[v_idx] += f_point
self.make_volumetric_and_external_forces()
self.make_external_energy()
def make_volumetric_and_external_forces(self):
'''
Convert force per unit mass to volumetric forces, then distribute
the forces to the vertices of the mesh.
Updated attributes:
- f_vol : torch tensor of shape (#t, 3) external force per unit volume acting on the tets
- f_ext : torch tensor of shape (#v, 3) external force acting on the vertices
'''
self.f_vol = torch.tile((self.rho * self.f_mass), (self.t.shape[0], 1)) # (#t, 3)
int_f_vol = torch.einsum('i, ij -> ij', self.W0, self.f_vol)
# from (#t,) to (4*#t,)
j = torch.arange(len(self.t))
j = torch.tile(j, (4,))
int_f_vol_tiled = int_f_vol[j]
f_ext_x = self.vertex_tet_sum(int_f_vol_tiled[:, 0])
f_ext_y = self.vertex_tet_sum(int_f_vol_tiled[:, 1])
f_ext_z = self.vertex_tet_sum(int_f_vol_tiled[:, 2])
self.f_ext = torch.stack((f_ext_x, f_ext_y, f_ext_z), dim=1) / 4 + self.f_point
## Force Differentials
def compute_force_differentials(self, v_disp):
'''
This computes the differential of the force given a displacement dx,
where df = df/dx|x . dx = - K(x).dx. The matrix vector product K(x)w
is then given by the call self.compute_force_differentials(-w).
Input:
- v_disp : displacement of the vertices of the mesh (#v, 3)
Output:
- df : force differentials at the vertices of the mesh (#v, 3)
Updated attributes:
- ee : elastic energy, some attributes should be updated
'''
# Compute the displacement differentials
d1 = (v_disp[self.t[:, 0]] - v_disp[self.t[:, 3]]).reshape(-1, 3, 1)
d2 = (v_disp[self.t[:, 1]] - v_disp[self.t[:, 3]]).reshape(-1, 3, 1)
d3 = (v_disp[self.t[:, 2]] - v_disp[self.t[:, 3]]).reshape(-1, 3, 1)
dDs = torch.cat((d1, d2, d3), dim=2)
# Differential of the Jacobian
dF = torch.einsum('lij,ljk->lik', dDs, self.Bm)
# Differential of the stress tensor (uses the current stress tensor)
self.ee.make_differential_strain_tensor(self.F, dF)
self.ee.make_differential_piola_kirchhoff_stress_tensor(self.F, dF)
# Differential of the forces
dH = torch.einsum('lij,ljk->lik', self.ee.dP, torch.swapaxes(self.Bm, 1, 2))
dH = - torch.einsum('i,ijk->ijk', self.W0, dH)
# Same as for the elastic forces
dfx = self.vertex_tet_sum(torch.hstack((dH[:, 0, 0], dH[:, 0, 1], dH[:, 0, 2],
-dH[:, 0, 0] - dH[:, 0, 1] - dH[:, 0, 2])))
dfy = self.vertex_tet_sum(torch.hstack((dH[:, 1, 0], dH[:, 1, 1], dH[:, 1, 2],
-dH[:, 1, 0] - dH[:, 1, 1] - dH[:, 1, 2])))
dfz = self.vertex_tet_sum(torch.hstack((dH[:, 2, 0], dH[:, 2, 1], dH[:, 2, 2],
-dH[:, 2, 0] - dH[:, 2, 1] - dH[:, 2, 2])))
# We stack them in a tensor of shape (#v, 3)
return torch.column_stack((dfx, dfy, dfz))
def equilibrium_step(self, verbose=False):
'''
This function displaces the whole solid to the next deformed configuration
using a Newton-CG step.
Input:
- verbose : whether or not to display quantities
Updated attributes:
- LHS : The hessian vector product
- RHS : Right hand side for the conjugate gradient linear solve
Other than them, only attributes updated by displace(self, v_disp) should be changed
'''
dx0s = torch.zeros_like(self.v_rest)
# Define LHS
def LHS(dx):
'''
Should implement the Hessian-Vector Product L(dx), and take care of pinning constraints
as described in the handout.
'''
dx0s[self.free_idx] = dx.reshape(-1,3)
df0s = - self.compute_force_differentials(dx0s)
return df0s[self.free_idx, :].reshape(-1,)
self.LHS = LHS # Save to class for testing
# Define RHS
ft = self.f + self.f_ext
RHS = ft[self.free_idx, :].reshape(-1,)
self.RHS = RHS # Save to class for testing
dx = conjugate_gradient(LHS, RHS)
dx0s[self.free_idx] = dx.reshape(-1, 3)
# Run line search on the direction
step_size = 2
ft_free = RHS
g_old = torch.linalg.norm(ft_free)
max_l_iter = 20
for l_iter in range(max_l_iter):
step_size *= 0.5
dx_search = dx0s * step_size
energy_tot_prev = self.energy_el + self.energy_ext
self.displace(dx_search)
ft_new = (self.f_ext + self.f)[self.free_idx].reshape(-1,)
g = torch.linalg.norm(ft_new)
energy_tot_tmp = self.energy_el + self.energy_ext
armijo = energy_tot_tmp < energy_tot_prev - 1e-4*step_size*torch.sum(dx.reshape(-1,)*ft_free)
if armijo or l_iter == max_l_iter-1:
if verbose:
print("Energy: " + str(energy_tot_tmp) + " Force residual norm: " + str(g) + " Line search Iters: " + str(l_iter))
break
else:
self.displace(-dx_search)
def find_equilibrium(self, n_steps=100, thresh=1., verbose=False):
'''
Input:
- n_steps : maximum number of optimization steps
- thresh : threshold on the force value [N]
'''
for i in range(n_steps):
# Take a Newton-CG step
self.equilibrium_step(verbose=verbose)
assert not torch.isnan(self.energy_el)
# Measure the force residuals
residuals_tmp = torch.linalg.norm((self.f + self.f_ext)[self.free_idx, :])
if residuals_tmp < thresh:
break
if verbose: print("Final residuals (equilibrium): {:.2e}".format(to_numpy(residuals_tmp)))
def vertex_tet_sum(v, t, data):
'''
Distributes data specified at each tetrahedron to the neighboring vertices.
All neighboring vertices will receive the value indicated at the corresponding tet position in data.
Input:
- v : vertex positions of the mesh (#v, 3), only used for the vertex count
- t : connectivity of the volumetric mesh (#t, 4)
- data : torch array of shape (#t,)
Output:
- data_sum : torch array of shape (#v,), containing the summed data
'''
i = t.T.flatten() # (4*#t,)
j = torch.arange(t.shape[0]) # (#t,)
j = torch.tile(j, (4,)) # (4*#t,)
# Has shape (#v, #t), a bit less efficient than using sparse matrices
m = torch.zeros(size=(v.shape[0], t.shape[0]), dtype=torch.float64)
m[i, j] = data
return torch.sum(m, dim=1)
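# Minimal usage sketch (illustrative only; the tiny two-tet mesh below is hypothetical).
# With all-ones data, vertex_tet_sum simply counts the tets incident to each vertex.
if __name__ == "__main__":
    v_demo = torch.zeros(size=(5, 3))                    # 5 vertices (positions are unused here)
    t_demo = torch.tensor([[0, 1, 2, 3], [1, 2, 3, 4]])  # 2 tets
    # One entry per tet corner, ordered corner by corner (shape (4*#t,))
    data_demo = torch.ones(size=(4 * t_demo.shape[0],), dtype=torch.float64)
    print(vertex_tet_sum(v_demo, t_demo, data_demo))     # tensor([1., 2., 2., 2., 1.])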

View File

@@ -0,0 +1,88 @@
import torch
import igl
import numpy as np
torch.set_default_dtype(torch.float64)
def to_numpy(tensor):
return tensor.detach().clone().numpy()
class HarmonicInterpolator:
'''
Harmonically interpolates interior vertex positions from boundary vertex positions.
Attributes:
- lii : block of the Laplacian matrix associated with the
interior vertices (#iv, #iv)
- lib : block of the Laplacian matrix coupling interior and
boundary vertices (#iv, #v - #iv)
- lii_inv : inverse of lii (#iv, #iv)
'''
def __init__(self, v, t, iv):
'''
Input:
- v : tensor of shape (#v, 3)
- t : tensor of shape (#t, 4)
- iv : np array containing the interior vertex indices (#iv,)
'''
self.lii = None
self.lib = None
self.lii_inv = None
self.iv = None
self.bv = None
self.update_interpolator(v, t, iv)
def update_interpolator(self, v, t, iv):
'''
Input:
- v : tensor of shape (#v, 3)
- t : tensor of shape (#t, 4)
- iv : np array containing the interior vertex indices (#iv,)
'''
vNP = to_numpy(v)
tNP = to_numpy(t)
self.iv = iv
v_idx = np.arange(v.shape[0])
self.bv = v_idx[np.invert(np.in1d(v_idx, iv))]
l = igl.cotmatrix(vNP, tNP).todense()
self.lii = torch.tensor(l[iv[:, np.newaxis], self.iv])
self.lib = torch.tensor(l[iv[:, np.newaxis], self.bv])
self.lii_inv = torch.linalg.inv(self.lii)
def interpolate(self, v_surf):
'''
Input:
- v_surf : tensor of shape (#v - #iv, 3) containing the boundary vertex positions
from which the interior vertices are interpolated
Output:
- v_inter : tensor of shape (#v, 3) in which the interior vertices
have been harmonically interpolated from v_surf
'''
n_v = self.bv.shape[0] + self.iv.shape[0]
v_inter = torch.zeros(size=(n_v, 3))
v_inter[self.bv, :] = v_surf
v_inter[self.iv, :] = - self.lii_inv @ self.lib @ v_surf
return v_inter
def interpolate_fill(self, v_vol):
'''
Input:
- v_vol : tensor of shape (#v, 3) whose interior vertices
should be (re)interpolated from its own boundary vertices
Output:
- v_inter : tensor of shape (#v, 3) in which the interior vertices
have been harmonically interpolated from the boundary vertices of v_vol.
Note that v_vol is modified in place (v_inter aliases v_vol).
'''
n_v = self.bv.shape[0] + self.iv.shape[0]
assert n_v == v_vol.shape[0]
v_inter = v_vol
v_inter[self.iv, :] = - self.lii_inv @ self.lib @ v_vol[self.bv, :]
return v_inter
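# Minimal usage sketch (hypothetical mesh, illustrative only): one outer tetrahedron split
# into four tets around an extra vertex, so vertex 4 is the single interior vertex. The
# interpolator then reconstructs its position harmonically from the boundary vertices alone.
if __name__ == "__main__":
    v_demo = torch.tensor([[0., 0., 0.],
                           [1., 0., 0.],
                           [0., 1., 0.],
                           [0., 0., 1.],
                           [0.25, 0.25, 0.25]])
    t_demo = torch.tensor([[0, 1, 2, 4],
                           [0, 3, 1, 4],
                           [0, 2, 3, 4],
                           [1, 3, 2, 4]])
    bv_demo = np.unique(igl.boundary_facets(to_numpy(t_demo)))   # boundary vertices 0..3
    iv_demo = np.setdiff1d(np.arange(v_demo.shape[0]), bv_demo)  # interior vertex 4
    interp = HarmonicInterpolator(v_demo, t_demo, iv_demo)
    # The interior vertex is placed at the solution of the discrete Laplace equation
    print(interp.interpolate(v_demo[bv_demo]))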

View File

@@ -0,0 +1,119 @@
from elasticenergy import *
from elasticsolid import *
import torch
class ObjectiveBV:
def __init__(self, vt_surf, bv):
self.vt_surf, self.bv = vt_surf, bv
def obj(self, v):
return objective_target_BV(v, self.vt_surf, self.bv)
def grad(self, v):
return grad_objective_target_BV(v, self.vt_surf, self.bv)
class ObjectiveReg:
def __init__(self, params_init, params_idx, harm_int, weight_reg=0, force_scale=1e7, length_scale=1.):
self.params_init, self.params_idx, self.harm_int = params_init, params_idx, harm_int
self.weight_reg, self.force_scale, self.length_scale = weight_reg, force_scale, length_scale
def obj(self, solid, params_tmp):
if self.weight_reg == 0: return 0
return self.length_scale * self.weight_reg / self.force_scale * regularization_neo_hookean(self.params_init, solid, params_tmp, self.params_idx, self.harm_int)
def grad(self, solid, params_tmp):
if self.weight_reg == 0: return torch.zeros_like(self.params_init)
return self.length_scale * self.weight_reg / self.force_scale * regularization_grad_neo_hookean(self.params_init, solid, params_tmp, self.params_idx, self.harm_int)
def objective_target_BV(v, vt, bv):
'''
Input:
- v : array of shape (#v, 3), containing the current vertex positions
- vt : array of shape (#bv, 3), containing the target surface positions
- bv : boundary vertex indices (#bv,)
Output:
- objective : single scalar measuring the deviation from the target shape
'''
vc = v[bv]
J = 0.5 * torch.norm(vc-vt)**2
return J
def grad_objective_target_BV(v, vt, bv):
'''
Input:
- v : array of shape (#v, 3), containing the current vertex positions
- vt : array of shape (#bv, 3), containing the target surface positions
- bv : boundary vertex indices (#bv,)
Output:
- gradient : array of shape (#v, 3)
'''
vc = v[bv]
dJ = vc - vt
res = torch.zeros(v.shape)
res[bv] = dJ
return res
def regularization_neo_hookean(params_prev, solid, params, params_idx, harm_int):
'''
Input:
- params_prev : array of shape (3*#params,) containing the previous shape
- solid : an elastic solid to copy
- params : array of shape (3*#params,) containing the new shape parameters
- params_idx : parameters index in the vertex list. Has shape (#params,)
- harm_int : a harmonic interpolator
Output:
- energy : the Neo-Hookean elastic energy of the new shape, using the previous shape as the rest state
'''
v_prev = solid.v_rest.detach().clone() # clone so the solid's rest shape is not modified in place
v_prev[params_idx, :] = params_prev.reshape(-1, 3)
v_prev = harm_int.interpolate_fill(v_prev)
f_mass = torch.zeros(size=(3,))
ee_tmp = NeoHookeanElasticEnergy(solid.ee.young, solid.ee.poisson)
solid_virtual = ElasticSolid(v_prev, solid.t, ee_tmp, rho=solid.rho,
pin_idx=solid.pin_idx, f_mass=f_mass)
v_new = solid_virtual.v_rest.detach()
v_new[params_idx, :] = params.reshape(-1, 3)
v_new = harm_int.interpolate_fill(v_new)
solid_virtual.update_def_shape(v_new)
solid_virtual.make_elastic_energy()
return solid_virtual.energy_el
def regularization_grad_neo_hookean(params_prev, solid, params, params_idx, harm_int):
'''
Input:
- params_prev : array of shape (3*#params,) containing the previous shape
- solid : an elastic solid to copy
- params : array of shape (3*#params,) containing the new shape parameters
- params_idx : parameters index in the vertex list. Has shape (#params,)
- harm_int : a harmonic interpolator
Output:
- grad_reg : array of shape (3*#params,), the regularization gradient
'''
v_prev = solid.v_rest.detach().clone() # clone so the solid's rest shape is not modified in place
v_prev[params_idx, :] = params_prev.reshape(-1, 3)
v_prev = harm_int.interpolate_fill(v_prev)
ee_tmp = NeoHookeanElasticEnergy(solid.ee.young, solid.ee.poisson)
solid_virtual = ElasticSolid(v_prev, solid.t, ee_tmp, rho=solid.rho,
pin_idx=solid.pin_idx, f_mass=solid.f_mass)
v_new = solid_virtual.v_rest.detach()
v_new[params_idx, :] = params.reshape(-1, 3)
v_new = harm_int.interpolate_fill(v_new)
solid_virtual.update_def_shape(v_new)
# Negative of the elastic forces: gradient of the energy
grad_reg = - solid_virtual.f[params_idx].reshape(-1,)
return grad_reg
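# Minimal sketch (hypothetical values) of the fitting objective and its gradient:
# J = 0.5 * ||v[bv] - vt||^2, so dJ/dv equals (v[bv] - vt) scattered onto the rows in bv.
if __name__ == "__main__":
    v_demo = torch.zeros(size=(4, 3))
    vt_demo = torch.ones(size=(2, 3))
    bv_demo = torch.tensor([1, 3])
    print(objective_target_BV(v_demo, vt_demo, bv_demo))       # 0.5 * 6 = 3.0
    print(grad_objective_target_BV(v_demo, vt_demo, bv_demo))  # -1 on rows 1 and 3, 0 elsewhere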

View File

@@ -0,0 +1,310 @@
import torch
from elasticsolid import *
from objectives import *
from harmonic_interpolator import *
from adjoint_sensitivity import *
from IPython import display
import time
from vis_utils import plot_torch_solid
from utils import *
torch.set_default_dtype(torch.float64)
class ShapeOptimizer():
def __init__(self, solid, vt_surf, weight_reg=0.):
# Elastic Solid with the initial rest vertices stored
self.solid = solid
# Mesh info of solid
bvNP, ivNP = get_boundary_and_interior(solid.v_rest.shape[0], to_numpy(solid.t))
self.bvNP, self.ivNP = bvNP, ivNP
self.beNP = igl.edges(igl.boundary_facets(to_numpy(solid.t)))
# Initialize Laplacian/Harmonic Interpolator
v_init_rest = solid.v_rest.clone().detach()
self.harm_int = HarmonicInterpolator(v_init_rest, solid.t, ivNP)
# Initialize interior vertices with harmonic interpolation
self.v_init_rest = self.harm_int.interpolate(v_init_rest[bvNP])
solid.update_rest_shape(self.v_init_rest)
# Define optimization params and their indices
self.params_idx = torch.tensor(np.intersect1d(to_numpy(solid.free_idx), bvNP))
params_init = v_init_rest[self.params_idx].reshape(-1,)
self.params, self.params_prev = params_init.clone(), params_init.clone() # At time step t, t-1
# Target surface and Objectives
self.vt_surf = vt_surf
self.tgt_fit = ObjectiveBV(vt_surf, bvNP)
self.neo_reg = ObjectiveReg(params_init.clone().detach(), self.params_idx, self.harm_int, weight_reg = weight_reg)
# Compute equilibrium deformation
self.solid.find_equilibrium()
obj_init = self.tgt_fit.obj(solid.v_def.clone().detach())
print("Initial objective: {:.4e}\n".format(obj_init))
# Initialize grad
self.grad = torch.zeros(size=(3 * self.params_idx.shape[0],))
# BFGS book-keeping
self.invB = torch.eye(3 * self.params_idx.shape[0])
self.grad_prev = torch.zeros(size=(3 * self.params_idx.shape[0],))
def compute_gradient(self):
'''
Computes the full gradient including the forward simulation and regularization.
Updated attributes:
- grad : torch.tensor of shape (#params,)
'''
# dJ/dx from Target Fitting
dJ_dx = self.tgt_fit.grad(self.solid.v_def.clone().detach())[self.solid.free_idx]
self.grad = gradient_helper_autograd(self.solid, dJ_dx, self.params, self.params_idx, self.harm_int)
# Add regularization gradient
self.grad = self.grad + self.neo_reg.grad(self.solid, self.params)
return self.grad
def update_BFGS(self):
'''
Update BFGS hessian inverse approximation
Updated attributes:
- invB : torch.tensor of shape (#params, #params)
'''
sk = self.params - self.params_prev
yk = self.grad - self.grad_prev
self.invB = compute_inverse_approximate_hessian_matrix(sk.reshape(-1, 1), yk.reshape(-1, 1), self.invB)
def reset_BFGS(self):
'''
Reset BFGS hessian inverse approximation to Identity
Updated attributes:
- invB : torch.tensor of shape (#params, #params)
'''
self.invB = torch.eye(3 * self.params_idx.shape[0])
def set_params(self, params_in, verbose=False):
'''
Set optimization params to the input params_in
Input:
- params_in : Input params to set the solid to, torch.tensor of shape (#params,)
- verbose : Boolean specifying the verbosity of the equilibrium solve
Updated attributes:
- solid : Elastic solid, specifically, the rest shape and consequently the equilibrium deformation
'''
# From the input params_in, interpolate all the rest vertices using harm_int
v_search = self.solid.v_rest.clone()
v_search[self.params_idx, :] = params_in.reshape(-1, 3)
v_search = self.harm_int.interpolate_fill(v_search)
# Update solid using the new rest shape v_search
self.solid.update_rest_shape(v_search)
# Update equilibrium deformation
self.solid.find_equilibrium(verbose=verbose)
def compute_obj(self):
'''
Compute Objective at the current params
Output:
- obj : Accumulated objective value
'''
obj = self.tgt_fit.obj(self.solid.v_def.clone().detach())
obj += self.neo_reg.obj(self.solid, self.params)
return obj
def line_search_step(self, step_size_init, max_l_iter):
'''
Perform line search to take a step in the BFGS descent direction at the current optimization state
Input:
- step_size_init : Initial value of step size
- max_l_iter : Maximum iterations of line search
Updated attributes:
- solid : Elastic solid, specifically, the rest shape and consequently the equilibrium deformation
- params, params_prev : torch Tensor of shape (#params,)
- grad_prev : torch Tensor of shape (#params,)
Output:
- obj_new : New objective value after taking the step
- l_iter : Number of line search iterations taken
'''
step_size = step_size_init
# Compute previous objective for armijo rule
obj_prev = self.compute_obj()
success = False
for l_iter in range(max_l_iter):
step_size *= 0.5
# BFGS descent direction
descent_dir = - self.invB @ self.grad
params_search = self.params + step_size * descent_dir
# Try taking a step
try:
# Save solid rest and def states in case steps fail
rest_prev, def_prev = self.solid.v_rest.clone().detach(), self.solid.v_def.clone().detach()
# Take a step along the descent direction
self.set_params(params_search, verbose=False)
# Compute the new objective
obj_search = self.compute_obj()
assert not torch.isnan(obj_search), "nan encountered"
# Evaluate the Armijo (sufficient decrease) condition along the descent direction
armijo = obj_search < obj_prev + 1e-4 * step_size * torch.sum(descent_dir * self.grad)
if armijo or l_iter == max_l_iter-1:
self.params_prev = self.params.clone().detach() # BFGS bookkeeping
self.grad_prev = self.grad.clone() # BFGS bookkeeping
self.params, obj_new = params_search, obj_search # params at t becomes params at t+1
success = True
break
# Fallback to previous in case we take a step that is too large
except Exception as e:
print("An exception occured: {}. \nHalving the step.".format(e))
self.solid.update_rest_shape(rest_prev) # fall back to non NaN values
self.solid.update_def_shape(def_prev) # faster convergence fo the next equilibrium
if l_iter == max_l_iter - 1:
return obj_prev, l_iter, success
return obj_new, l_iter, success
def optimize(self, step_size_init=1e-3, max_l_iter=10, n_optim_steps=10):
'''
Run BFGS to optimize over the objective J.
Input:
- step_size_init : Initial value of step size
- max_l_iter : Maximum iterations of line search
- n_optim_steps : Number of BFGS steps to take
Updated attributes:
- objectives : Tensor tracking objectives across optimization steps
- grad_mags : Tensor tracking norms of the gradient across optimization steps
'''
self.objectives = torch.zeros(size=(n_optim_steps+1,))
self.grad_mags = torch.zeros(size=(n_optim_steps,))
self.objectives[0] = self.compute_obj()
startTime = time.time()
for i in range(n_optim_steps):
# Update the gradients
self.grad_mags[i] = torch.linalg.norm(self.compute_gradient())
# Update BFGS quantities
if i >= 1:
self.update_BFGS()
# Line Search
# Updates self.objectives[i+1], l_iter, and success upon returning
self.objectives[i+1], l_iter, success = self.line_search_step(step_size_init, max_l_iter)
if not success:
print("Line search can't find a step to take")
return
display.clear_output(wait=True)
# Remaining time
curr_time = (time.time() - startTime)
rem_time = (n_optim_steps - i - 1) / (i + 1) * curr_time
print("Objective after {} optimization step(s): {:.4e}".format(i+1, self.objectives[i+1]))
print(" Line search Iters: " + str(l_iter))
print("Elapsed time: {:.1f}s. \nEstimated remaining time: {:.1f}s\n".format(curr_time, rem_time))
# Plot the resulting mesh
rot = np.array(
[[1, 0, 0 ],
[0, 0, 1],
[0, -1, 0 ]]
)
aabb = np.max(to_numpy(self.solid.v_rest), axis=0) - np.min(to_numpy(self.solid.v_rest), axis=0)
length_scale = np.mean(aabb)
plot_torch_solid(self.solid, self.beNP, rot, length_scale, target_mesh=self.vt_surf)
# # Early Termination
# if (self.objectives[i] - self.objectives[i+1]) < 1e-3 * self.objectives[i]:
# print("Decrease regularization weight.")
# self.neo_reg.weight_reg *= 0.5
# invB = torch.eye(3 * self.params_idx.shape[0])
# if (self.objectives[i] - self.objectives[i+1]) < 1e-5 * self.objectives[i]:
# print("Stop optimization due to non satisfactory relative progress on the objective.")
# break
def gradient_helper_autograd(solid, dJ_dx, params_tmp, params_idx, harm_int):
'''
Builds a PyTorch computation graph to obtain the gradient of the forward simulation via the adjoint method and automatic differentiation
'''
# Adjoint state y
adjoint = compute_adjoint(solid, dJ_dx)
# Define the variable with respect to which the gradient of the dot product f_tot . y is collected
params_collect = params_tmp.clone()
params_collect.requires_grad = True
# Express f_tot . y as a differentiable PyTorch function of the params so that autograd can provide its gradient
dot_prod = adjoint_dot_forces(params_collect, solid, adjoint, params_idx, harm_int)
dot_prod.backward()
dJ_dX = params_collect.grad.clone()
params_collect.grad = None # Reset grad
return dJ_dX
def adjoint_dot_forces(params, solid, adjoint, params_idx, harm_int):
'''
Input:
- params : array of shape (3*#params,)
- solid : an elastic solid to copy with the deformation at equilibrium
- adjoint : adjoint state vector of shape (3*#v,)
- params_idx : parameters index in the vertex list. Has shape (#params,)
- harm_int : harmonic interpolator
Output:
- dot_prod : dot product between forces at equilibrium and adjoint state vector
'''
# From params, compute the full rest state using the harmonic interpolation
v_vol = solid.v_rest.detach().clone() # clone so the input solid's rest shape is not modified in place
v_vol[params_idx, :] = params.reshape(-1, 3)
v_update = harm_int.interpolate_fill(v_vol)
# Initialize a solid with this rest state and the same deformed state as the input solid
if "LinearElasticEnergy" in str(type(solid.ee)):
ee_tmp = LinearElasticEnergy(solid.ee.young, solid.ee.poisson)
elif "NeoHookeanElasticEnergy" in str(type(solid.ee)):
ee_tmp = NeoHookeanElasticEnergy(solid.ee.young, solid.ee.poisson)
solid_tmp = ElasticSolid(v_update, solid.t, ee_tmp, rho=solid.rho,
pin_idx=solid.pin_idx, f_mass=solid.f_mass)
solid_tmp.update_def_shape(solid.v_def.clone().detach())
return adjoint.detach() @ (solid_tmp.f + solid_tmp.f_ext).reshape(-1,)
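# Minimal driver sketch showing how these pieces are typically assembled. All inputs
# (v, t, vt_surf, pin_idx) are placeholders for data loaded elsewhere, and the material
# and solver settings below are illustrative assumptions, not prescribed values.
#
#   ee = NeoHookeanElasticEnergy(1e7, 0.45)                       # Young's modulus, Poisson's ratio
#   solid = ElasticSolid(v, t, ee, rho=1e3, pin_idx=pin_idx,
#                        f_mass=torch.tensor([0., 0., -9.81]))    # assumed mass force (gravity)
#   optimizer = ShapeOptimizer(solid, vt_surf, weight_reg=0.)
#   optimizer.optimize(step_size_init=1e-3, max_l_iter=10, n_optim_steps=20)
#   v_rest_optimized = optimizer.solid.v_rest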

View File

@@ -0,0 +1,266 @@
import torch
import igl
import numpy as np
import matplotlib.pyplot as plt
import meshplot as mp
import time
from scipy.sparse.linalg import cg
from scipy.sparse.linalg import LinearOperator
torch.set_default_dtype(torch.float64)
def to_numpy(tensor):
return tensor.detach().clone().numpy()
def get_boundary_and_interior(vlen, t):
bv = np.unique(igl.boundary_facets(t))
vIdx = np.arange(vlen)
iv = vIdx[np.invert(np.in1d(vIdx, bv))]
return bv, iv
def conjugate_gradient(L_method, b):
'''
Finds an inexact Newton descent direction using Conjugate Gradient (CG).
Partially solves L(x) = b, where the operator L is assumed positive definite, using CG.
The method checks whether the current search direction has non-positive curvature
and whether the residuals are small enough to stop early.
Details can be found in the handout.
Input:
- L_method : a method that computes the Hessian vector product. It should
take an array of shape (n,) and return an array of shape (n,)
- b : right hand side of the linear system (n,)
Output:
- p_star : torch array of shape (n,) solving the linear system approximately
'''
n = b.shape[0]
p_star = torch.zeros_like(b)
residual = - b
direction = - residual.clone()
grad_norm = torch.linalg.norm(b)
# Reusable quantities
L_direction = L_method(direction) # L d
res_norm_sq = torch.sum(residual ** 2, dim=0) # r^T r
dir_norm_sq_L = direction @ L_direction
for k in range(n):
if dir_norm_sq_L <= 0:
if k == 0:
return b
else:
return p_star
# Compute the new guess for the solution
alpha = res_norm_sq / dir_norm_sq_L
p_star = p_star + direction * alpha
residual = residual + L_direction * alpha
# Check that the new residual norm is small enough
new_res_norm_sq = torch.sum(residual ** 2, dim=0)
if torch.sqrt(new_res_norm_sq) < min(0.5, torch.sqrt(grad_norm))*grad_norm:
break
# Update quantities
beta = new_res_norm_sq / res_norm_sq
direction = - residual + direction * beta
L_direction = L_method(direction)
res_norm_sq = new_res_norm_sq
dir_norm_sq_L = direction@L_direction
# print("Num conjugate gradient steps: " + str(k))
return p_star
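# Minimal sanity-check sketch (hypothetical 3x3 SPD system) of how conjugate_gradient is
# called: the operator is passed as a matrix-vector product, the right-hand side as a vector.
#
#   A_demo = torch.tensor([[4., 1., 0.], [1., 3., 1.], [0., 1., 2.]])
#   b_demo = torch.tensor([1., 2., 3.])
#   x_demo = conjugate_gradient(lambda x: A_demo @ x, b_demo)
#   # The solve is inexact by design: it may stop once the residual criterion above is met,
#   # so the residual below is small but not necessarily zero.
#   print(torch.linalg.norm(A_demo @ x_demo - b_demo))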
def linear_solve(L_method, b):
'''
Solves Ax = b where A is positive definite.
A has shape (n, n), x and b have shape (n,).
Input:
- L_method : a method that takes x and returns the product Ax
- b : right hand side of the linear system
Output:
- x_star : np array of shape (n,) solving the linear system
'''
dim = b.shape[0]
def LHSnp(x):
return to_numpy(L_method(torch.tensor(x)))
LHS_op = LinearOperator((dim, dim), matvec=LHSnp)
x_star_np, _ = cg(LHS_op, to_numpy(b))
x_star = torch.tensor(x_star_np)
return x_star
def compute_inverse_approximate_hessian_matrix(sk, yk, invB_prev):
'''
Input:
- sk : previous step x_{k+1} - x_k, shape (n, 1)
- yk : grad(f)_{k+1} - grad(f)_{k}, shape (n, 1)
- invB_prev : previous inverse Hessian estimate, shape (n, n)
Output:
- invB_new : updated inverse Hessian estimate, shape (n, n)
'''
invB_new = invB_prev.clone()
invB_new += (sk.T @ yk + yk.T @ invB_prev @ yk) / ((sk.T @ yk) ** 2) * (sk @ sk.T)
prod = (invB_prev @ yk) @ sk.T
invB_new -= (prod + prod.T) / (sk.T @ yk)
return invB_new
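# Minimal sketch (hypothetical values): a single BFGS update of the inverse Hessian
# approximation satisfies the secant equation invB_new @ yk = sk.
#
#   sk_demo = torch.tensor([[1.0], [0.5]])
#   yk_demo = torch.tensor([[2.0], [1.0]])
#   invB_demo = compute_inverse_approximate_hessian_matrix(sk_demo, yk_demo, torch.eye(2))
#   print(invB_demo @ yk_demo - sk_demo)   # zero up to round-off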
def equilibrium_convergence_report_NCG(solid, v_init, n_steps, thresh=1e-3):
'''
Finds the equilibrium by minimizing the total energy using Newton CG.
Input:
- solid : an elastic solid to optimize
- v_init : the initial guess for the equilibrium position
- n_steps : number of optimization steps
- thresh : threshold on the force residual norm used to stop the optimization
Output:
- report : a dictionary containing various quantities of interest
'''
solid.update_def_shape(v_init)
energies_el = np.zeros(shape=(n_steps+1,))
energies_ext = np.zeros(shape=(n_steps+1,))
residuals = np.zeros(shape=(n_steps+1,))
times = np.zeros(shape=(n_steps+1,))
energies_el[0] = solid.energy_el
energies_ext[0] = solid.energy_ext
residuals[0] = torch.linalg.norm((solid.f + solid.f_ext)[solid.free_idx, :])
idx_stop = n_steps
t_start = time.time()
for i in range(n_steps):
# Take a Newton step
solid.equilibrium_step()
# Measure the force residuals
energies_el[i+1] = solid.energy_el
energies_ext[i+1] = solid.energy_ext
residuals[i+1] = torch.linalg.norm((solid.f + solid.f_ext)[solid.free_idx, :])
if residuals[i+1] < thresh:
residuals[i+1:] = residuals[i+1]
energies_el[i+1:] = energies_el[i+1]
energies_ext[i+1:] = energies_ext[i+1]
idx_stop = i
break
times[i+1] = time.time() - t_start
report = {}
report['energies_el'] = energies_el
report['energies_ext'] = energies_ext
report['residuals'] = residuals
report['times'] = times
report['idx_stop'] = idx_stop
return report
def fd_validation_ext(solid):
epsilons = torch.logspace(-9, -3, 100)
perturb_global = 1e-3 * (2. * torch.rand(size=solid.v_def.shape) - 1.)
solid.displace(perturb_global)
v_def = solid.v_def.clone()
perturb = 2. * torch.rand(size=solid.v_def.shape) - 1.
errors = []
for eps in epsilons:
# Back to original
solid.update_def_shape(v_def)
grad = torch.zeros(solid.f_ext.shape)
grad[solid.free_idx] = -solid.f_ext[solid.free_idx]
an_delta_E = (grad*perturb).sum()
# One step forward
solid.displace(perturb * eps)
E1 = solid.energy_ext.clone()
# Two steps backward
solid.displace(-2*perturb * eps)
E2 = solid.energy_ext.clone()
# Compute error
fd_delta_E = (E1 - E2)/(2*eps)
errors.append(abs(fd_delta_E - an_delta_E)/abs(an_delta_E))
plt.loglog(epsilons, errors)
plt.grid()
plt.show()
def fd_validation_elastic(solid):
epsilons = torch.logspace(-9, -3, 100)
perturb_global = 1e-3 * (2. * torch.rand(size=solid.v_def.shape) - 1.)
solid.displace(perturb_global)
v_def = solid.v_def.clone()
solid.make_elastic_forces()
perturb = 2. * torch.rand(size=solid.v_def.shape) - 1.
errors = []
for eps in epsilons:
# Back to original
solid.update_def_shape(v_def)
solid.make_elastic_forces()
grad = torch.zeros(solid.f.shape)
grad[solid.free_idx] = -solid.f[solid.free_idx]
an_delta_E = (grad*perturb).sum()
# One step forward
solid.displace(perturb * eps)
E1 = solid.energy_el.clone()
# Two steps backward
solid.displace(-2*perturb * eps)
E2 = solid.energy_el.clone()
# Compute error
fd_delta_E = (E1 - E2)/(2*eps)
errors.append(abs(fd_delta_E - an_delta_E)/abs(an_delta_E))
plt.loglog(epsilons, errors)
plt.grid()
plt.show()
def fd_validation_elastic_differentials(solid):
epsilons = torch.logspace(-9, 3, 500)
perturb_global = 1e-3 * (2. * torch.rand(size=solid.v_def.shape) - 1.)
solid.displace(perturb_global)
v_def = solid.v_def.clone()
perturb = 2. * torch.rand(size=solid.v_def.shape) - 1.
errors = []
for eps in epsilons:
# Back to original
solid.update_def_shape(v_def)
perturb_0s = torch.zeros_like(perturb)
perturb_0s[solid.free_idx] = perturb[solid.free_idx]
an_df = solid.compute_force_differentials(perturb_0s)[solid.free_idx, :]
an_df_full = torch.zeros(solid.f.shape)
an_df_full[solid.free_idx] = an_df.clone()
# One step forward
solid.displace(perturb * eps)
f1 = solid.f[solid.free_idx, :]
f1_full = torch.zeros(solid.f.shape)
f1_full[solid.free_idx] = f1
# Two steps backward
solid.displace(-2*perturb * eps)
f2 = solid.f[solid.free_idx, :]
f2_full = torch.zeros(solid.f.shape)
f2_full[solid.free_idx] = f2
# Compute error
fd_delta_f = (f1_full - f2_full)/(2*eps)
norm_an_df = torch.linalg.norm(an_df_full)
norm_error = torch.linalg.norm(an_df_full - fd_delta_f)
errors.append(norm_error/norm_an_df)
plt.loglog(epsilons, errors)
plt.grid()
plt.show()
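# These finite-difference checks are typically run once on a small solid, e.g.:
#
#   fd_validation_ext(solid)
#   fd_validation_elastic(solid)
#   fd_validation_elastic_differentials(solid)
#
# where solid is an ElasticSolid instance (hypothetical here); a plateau of small relative
# errors at intermediate eps indicates consistent forces and force differentials.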

View File

@@ -0,0 +1,30 @@
import meshplot as mp
import numpy as np
# import jax.numpy as jnp
import torch
shadingOptions = {
"flat":True,
"wireframe":False,
}
def to_numpy(tensor):
return tensor.detach().clone().numpy()
def plot_torch_solid(solid, be, rot, length_scale, target_mesh=None):
'''
Input:
- solid : elastic solid to visualize
- be : boundary edges
- rot : transformation matrix to apply (here we assume it is a rotation)
- length_scale : length scale of the mesh, used to size the markers for pinned vertices
- target_mesh : optional tensor of target vertex positions, overlaid as red edges
'''
p = mp.plot(to_numpy(solid.v_def) @ rot.T, to_numpy(solid.t), shading=shadingOptions)
p.add_points(to_numpy(solid.v_def[solid.pin_idx, :]) @ rot.T, shading={"point_color":"black", "point_size": 0.1 * length_scale})
forcesScale = 2 * torch.max(torch.linalg.norm(solid.f_ext, axis=1))
p.add_lines(to_numpy(solid.v_def) @ rot.T, to_numpy(solid.v_def + solid.f_ext / forcesScale) @ rot.T)
p.add_edges(to_numpy(solid.v_rest) @ rot.T, be, shading={"line_color": "blue"})
if target_mesh is not None:
p.add_edges(to_numpy(target_mesh) @ rot.T, be, shading={"line_color": "red"})
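# Minimal usage sketch (hypothetical inputs, meant for a notebook with meshplot available),
# mirroring how the shape optimizer calls this function; igl and np are assumed to be imported there.
#
#   be = igl.edges(igl.boundary_facets(to_numpy(solid.t)))
#   aabb = np.max(to_numpy(solid.v_rest), axis=0) - np.min(to_numpy(solid.v_rest), axis=0)
#   plot_torch_solid(solid, be, np.eye(3), np.mean(aabb))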

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@@ -0,0 +1,85 @@
import numpy as np
import torch
import sys as _sys
_sys.path.append("../src/")
from elasticenergy import *
from elasticsolid import *
from adjoint_sensitivity import *
from vis_utils import *
from objectives import *
from harmonic_interpolator import *
from shape_optimizer import *
from utils import *
import json, pytest
torch.set_default_dtype(torch.float64)
eps = 1E-6
with open('test_data4_beam.json', 'r') as infile:
homework_datas = json.load(infile)
@pytest.mark.timeout(2)
@pytest.mark.parametrize("data", homework_datas[0])
def test_adjoint(data):
young, poisson, v, t, rho, pin_idx, force_mass, v_rest, v_def, dJ_dx_U, adjoint_gt = data
ee = NeoHookeanElasticEnergy(young, poisson)
es = ElasticSolid(torch.tensor(v), torch.tensor(np.array(t), dtype=torch.int64), ee, rho=rho, pin_idx=torch.tensor(pin_idx, dtype=torch.int64), f_mass=torch.tensor(force_mass))
es.update_rest_shape(torch.tensor(v_rest))
es.update_def_shape(torch.tensor(v_def))
assert torch.linalg.norm(compute_adjoint(es, torch.tensor(dJ_dx_U)) - torch.tensor(adjoint_gt)) < eps
@pytest.mark.timeout(2)
@pytest.mark.parametrize("data", homework_datas[1])
def test_obj(data):
v, vt, bv, obj_gt = data
assert torch.linalg.norm(objective_target_BV(torch.tensor(v), torch.tensor(vt), torch.tensor(bv)) - torch.tensor(obj_gt)) < eps
@pytest.mark.timeout(2)
@pytest.mark.parametrize("data", homework_datas[2])
def test_obj_grad(data):
v, vt, bv, obj_grad_gt = data
assert torch.linalg.norm(grad_objective_target_BV(torch.tensor(v), torch.tensor(vt), torch.tensor(bv)) - torch.tensor(obj_grad_gt)) < eps
@pytest.mark.timeout(2)
@pytest.mark.parametrize("data", homework_datas[3])
def test_pt_load(data):
young, poisson, v, t, rho, pin_idx, force_mass, v_rest, v_def, f_point_idx, f_point, f_point_gt, f_ext_gt = data
ee = NeoHookeanElasticEnergy(young, poisson)
es = ElasticSolid(torch.tensor(v), torch.tensor(np.array(t), dtype=torch.int64), ee, rho=rho, pin_idx=torch.tensor(pin_idx, dtype=torch.int64), f_mass=torch.tensor(force_mass))
es.update_rest_shape(torch.tensor(v_rest))
es.update_def_shape(torch.tensor(v_def))
es.add_point_load(torch.tensor(f_point_idx), torch.tensor(f_point))
assert torch.linalg.norm(es.f_point - torch.tensor(f_point_gt)) < eps
assert torch.linalg.norm(es.f_ext - torch.tensor(f_ext_gt)) < eps
@pytest.mark.parametrize("data", homework_datas[4])
def test_set_params(data):
young, poisson, v, t, vt_surf, rho, pin_idx, force_mass, v_rest, v_def, new_params, v_rest_gt, v_def_gt = data
ee = NeoHookeanElasticEnergy(young, poisson)
es = ElasticSolid(torch.tensor(v_rest), torch.tensor(np.array(t), dtype=torch.int64), ee, rho=rho, pin_idx=torch.tensor(pin_idx, dtype=torch.int64), f_mass=torch.tensor(force_mass))
es.update_def_shape(torch.tensor(v_def))
optimizer = ShapeOptimizer(es, torch.tensor(vt_surf), weight_reg=0.)
optimizer.set_params(torch.tensor(new_params))
assert torch.linalg.norm(optimizer.solid.v_rest - torch.tensor(v_rest_gt)) < eps
assert torch.linalg.norm(optimizer.solid.v_def - torch.tensor(v_def_gt)) < eps
@pytest.mark.parametrize("data", homework_datas[5])
def test_line_search(data):
young, poisson, v, t, vt_surf, rho, pin_idx, force_mass, v_rest, v_def, obj_gt, l_iter_gt, success_gt = data
ee = NeoHookeanElasticEnergy(young, poisson)
es = ElasticSolid(torch.tensor(v_rest), torch.tensor(np.array(t), dtype=torch.int64), ee, rho=rho, pin_idx=torch.tensor(pin_idx, dtype=torch.int64), f_mass=torch.tensor(force_mass))
# es.update_rest_shape(torch.tensor(v_rest))
es.update_def_shape(torch.tensor(v_def))
optimizer = ShapeOptimizer(es, torch.tensor(vt_surf), weight_reg=0.)
obj, l_iter, success = optimizer.line_search_step(1e-2, 10)
assert torch.linalg.norm(obj - torch.tensor(obj_gt)) < eps
assert l_iter == l_iter_gt
assert success == success_gt
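# The tests above are typically run with pytest from the folder that contains this file and
# test_data4_beam.json (pytest and pytest-timeout are assumed to be installed):
#
#   pytest -q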

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long