<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN">
<html>
<head>
<meta name="generator" content="HTML Tidy for Linux/x86 (vers 11 February 2007), see www.w3.org">
<style type="text/css">
/* Design Credits: Jon Barron and Deepak Pathak and Abhishek Kar and Saurabh Gupta*/
a {
color: #1772d0;
text-decoration:none;
}
a:focus, a:hover {
color: #f09228;
text-decoration:none;
}
body,td,th {
font-family: 'Titillium Web', Verdana, Helvetica, sans-serif;
font-size: 16px;
font-weight: 400
}
heading {
font-family: 'Titillium Web', Verdana, Helvetica, sans-serif;
font-size: 17px; /* 19 */
font-weight: 600 /* 1000 */
}
hr
{
border: 0;
height: 1px;
background-image: linear-gradient(to right, rgba(0, 0, 0, 0), rgba(0, 0, 0, 0.75), rgba(0, 0, 0, 0));
}
strong {
font-family: 'Titillium Web', Verdana, Helvetica, sans-serif;
font-size: 16px;
font-weight: 600 /* 800 */
}
strongred {
font-family: 'Titillium Web', Verdana, Helvetica, sans-serif;
color: red;
font-size: 16px
}
sectionheading {
font-family: 'Titillium Web', Verdana, Helvetica, sans-serif;
font-size: 22px;
font-weight: 600
}
pageheading {
font-family: 'Titillium Web', Verdana, Helvetica, sans-serif;
font-size: 38px;
font-weight: 400
}
.ImageBorder
{
border-width: 1px;
border-color: Black;
}
</style>
<link rel="shortcut icon" href="images/web-logo-white-120x120.png">
<script type="text/javascript" src="js/hidebib.js"></script>
<title>Hanning Liu</title>
<meta name="Hanning Liu's Homepage" http-equiv="Content-Type" content="Hanning Liu's Homepage">
<link href='https://fonts.googleapis.com/css?family=Titillium+Web:400,600,400italic,600italic,300,300italic' rel='stylesheet' type='text/css'>
<!-- Start : Google Analytics Code -->
<script>
(function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){
(i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o),
m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m)
})(window,document,'script','https://www.google-analytics.com/analytics.js','ga');
ga('create', 'UA-XXXXX-Y', 'auto');
ga('send', 'pageview');
</script>
<!-- End : Google Analytics Code -->
<!-- Scramble Script by Jeff Donahue -->
<script src="js/scramble.js"></script>
</head>
<body>
<table width="900" border="0" align="center" border="0" cellspacing="0" cellpadding="20">
<tr><td>
<table width="100%" align="center" border="0" cellspacing="0" cellpadding="20">
<p align="center">
<pageheading>Hanning Liu 「刘函宁」</pageheading><br>
</p>
<tr>
<td width="30%" valign="top"><a href="images/HanningLiu-2021_1M.png"><img src="images/HanningLiu-2021_1M.png" width="100%" style="border-radius:15px"></a>
<p align=center>
| <a href="data/CV_HanningLiu_20240904.pdf">CV</a> |
<a href="mailto:[email protected]">Email</a> |
<a href="https://scholar.google.com/citations?user=czEWyOQAAAAJ">Google Scholar</a> |
<br/>
| <a href="data/Portfolio_Revised_0903_MergeTwoPages_small.pdf">Portfolio</a> |
<a href="https://github.com/Hanning-Liu">Github</a> |
<a href="https://www.linkedin.com/in/hanning-liu-059886288/">LinkedIn</a> |
</p>
<p align="center" style="margin-top:-8px;"><iframe id="twitter-widget-0" scrolling="no" frameborder="0" allowtransparency="true" allowfullscreen="true" class="twitter-follow-button twitter-follow-button-rendered" style="position: static; visibility: visible; width: 180px; height: 20px;" title="Twitter Follow Button" src="https://platform.twitter.com/widgets/follow_button.2f70fb173b9000da126c79afe2098f02.en.html#dnt=false&id=twitter-widget-0&lang=en&screen_name=HanningLiu226&show_count=false&show_screen_name=true&size=m&time=1706734206165" data-screen-name=""></iframe><script async="" src="https://platform.twitter.com/widgets.js" charset="utf-8"></script></p>
</td>
<td width="70%" valign="top" align="justify">
<p>I am a second-year Master's student at the <a href="https://caup.tongji.edu.cn/caupen/main.htm">College of Architecture and Urban Planning (CAUP)</a> at <a href="https://en.tongji.edu.cn/p/#/">Tongji University</a>, advised by <a href="https://digitalfutures.tongji.edu.cn/25/39/c13677a140601/page.htm">Philip F. Yuan</a>.
</p>
<p>Previously, I received my Bachelor's degree in Architecture from <a href="https://www.xauat.edu.cn/en/index.html">Xi'an University of Architecture and Technology (XAUAT)</a>. I have also served as a teaching assistant for the ZAHA Tectonism Workshop at DigitalFUTURES (Summer 2023), the Computational Design class at Shanghai University (Autumn 2023), and the AI-Driven Performance-Based Tectonics class at Tongji University (Spring 2024).
</p>
<p>As an AR/MR researcher, my goal is to recognize and enhance the irreplaceable strengths of human beings in the era of automation. My focus is on providing architects with <u>an immersive experience throughout the design, fabrication, and assembly processes that ultimately leads to accurate construction outcomes</u>. To this end, I am dedicated to <u>developing geometrical algorithms and pose tracking methods</u> and integrating them seamlessly into immersive software on spatial computing platforms.
</p>
<p>Email: liuhanning [AT] tongji.edu.cn
</p>
</td>
</tr>
</table>
<hr/>
<table width="100%" align="center" border="0" cellspacing="0" cellpadding="10">
<tr><td><sectionheading> Publications</sectionheading></td></tr>
</table>
<table width="100%" align="center" border="0" cellspacing="0" cellpadding="15">
<tr>
<td width="40%" valign="top" align="center"><a href="">
<img src="images/bfxr/bfxr.png" poster="./images/loading-icon.gif" alt="sym" width="90%" style="padding-top:0px;padding-bottom:0px;border-radius:15px;"></video>
</a></td>
<td width="60%" valign="top">
<p><a href="" id="BFXR">
<heading>Bending Form in Extended Reality: A Gesture-Based Workflow of Chair Design and Fabrication</heading></a><br>
Hanning Liu, Chao Yan, Xingjie Xie, Tianyu Zhang, Ruyi Yang, Honglei Wu, Yao Zhang, Philip F. Yuan(<span>✉</span>) <br>
2024<br>
</p>
<div class="paper" id="bfxr">
<a>pdf (accepted by ACADIA 2024, available upon request)</a> |
<a href="javascript:toggleblock('bfxr_abs')">abstract</a>
<!--<a shape="rect" href="javascript:togglebib('bfxr')" class="togglebib">bibtex</a> |-->
<p align="justify"> <i id="bfxr_abs">Methods of integrating Extended Reality (XR) technology into the fabrication and assembly process are an intensively studied topic in the fields of Human-computer Interaction and Digital Fabrication. However, existing research primarily focuses on fabrication processes rather than breaking the limitations of current computer-aided design tools. XR technology can offer an immersive environment where designers can intuitively interact with 3D models. This paper pioneers the integration of XR not only for fabrication but also for the intuitive design process. The limitation of nowadays computer-aided design tools is its restriction to manipulating geometries in only 2D screen space, or the unintuitive predefined parameters and algorithms. To address this issue, we introduce a gesture-based intuitive design workflow. This workflow features three design modes: the “Free Draw” mode, the “Polyline Fillet” mode, and the “Spline Curve” mode. Through the design and fabrication process of two bending-form chairs, we demonstrate the implementation of mathematical and geometrical manipulation algorithms on spatial computation platform using a head-mounted display device. Besides, this research also elaborates on how to convert a spline curve to a format suitable for bending, as well as how to implement the steel tube bending simulation to prevent collisions with the environment during actual bending. The results of this research illustrate the entire design and fabrication process, highlighting the potential of XR to enhance the design process through a comparison between intuitive and traditional design methods, and paving the way of further development of the design and fabrication tools on spatial computation platforms.</i></p>
<pre xml:space="preserve">
<!--
@inproceedings{he2024learning,
author = {He, Tairan and Luo, Zhengyi and Xiao, Wenli and Zhang, Chong and Kitani, Kris and Liu, Changliu and Shi, Guanya},
title = {Learning Human-to-Humanoid Real-Time Whole-Body Teleoperation},
booktitle = {arXiv},
year = {2024},
}
-->
</pre>
</div>
</td>
</tr>
<tr>
<td width="40%" valign="top" align="center"><a href="">
<video playsinline autoplay loop muted src="images/mcar/MCAR_mute_1min31s.mp4" poster="./images/loading-icon.gif" alt="sym" width="90%" style="padding-top:0px;padding-bottom:0px;border-radius:15px;"></video>
</a></td>
<td width="60%" valign="top">
<p><a href="" id="MCAR">
<heading>Leveraging Motion Capture System for High Accuracy AR-Assisted Assembly (BEST PAPER AWARD)</heading></a><br>
Hanning Liu, Xingjie Xie, Yujiao Li, Xiaofan Gao, Honglei Wu, Yao Zhang, Philip F. Yuan(<span>✉</span>)<br>
2024<br>
</p>
<div class="paper" id="mcar">
<!-- <a href="https://agile-but-safe.github.io/">webpage</a> | -->
<a>pdf (accepted by CDRF 2024, available upon request)</a> |
<a href="javascript:toggleblock('mcar_abs')">abstract</a>
<!-- <a shape="rect" href="javascript:togglebib('agile-but-safe')" class="togglebib">bibtex</a> -->
<p align="justify"> <i id="mcar_abs">Augmented Reality (AR) allows workers to construct buildings accurately and intuitively without the need for traditional tools like 2-D drawings and rulers. However, accurately tracking worker’s pose remains a significant challenge in existing experiments due to their continuous and irregular movement. This re-search discusses a series of methods using cameras and algorithms to achieve the 6-DoF pose tracking function and reveal the relationship between each method and corresponding tracking accuracy in order to figure out a robust approach of AR-assisted assembly. This paper begins with a consideration of the possible limitations of existing methods including the image drift associ-ated with visual SLAM and the time-consuming nature of fiducial markers. Next, the entire hardware and software framework was introduced, which elaborates on how the motion capture system is integrated into the AR-assisted assembly system. Then, some experiments have been carried out to demonstrate the connection between the system set up and pose tracking ac-curacy. This research shows the possibility to easily finish assembly task based on AR technology by integrating motion capture system.</i></p>
<pre xml:space="preserve">
///
</pre>
</div>
</td>
</tr>
<tr>
<td width="40%" valign="top" align="center"><a href="https://link.springer.com/article/10.1007/s00004-024-00765-0">
<img src="images/abm/abm.png" alt="sym" width="90%" style="padding-top:0px; padding-bottom:0px; border-radius:15px; height: auto;">
</a></td>
<td width="60%" valign="top">
<p><a href="https://link.springer.com/article/10.1007/s00004-024-00765-0" id="ABM">
<heading>Agent-Based Principal Strips Modeling for Freeform Surfaces in Architecture</heading></a><br>
Hua Chai, Luis Orozco, Fabian Kannenberg, Lasath Siriwardena, Tobias Schwinn, Hanning Liu, Achim Menges(<span>✉</span>), Philip F. Yuan(<span>✉</span>)<br>
2024<br>
</p>
<div class="paper" id="abm">
<!-- <a href="https://manipulation-locomotion.github.io">webpage</a> | -->
<a href="https://www.researchgate.net/publication/378490995_Agent-Based_Principal_Strips_Modeling_for_Freeform_Surfaces_in_Architecture">pdf</a> |
<a href="javascript:toggleblock('abm_abs')">abstract</a> |
<a shape="rect" href="javascript:togglebib('abm')" class="togglebib">bibtex</a> |
<a href="https://link.springer.com/article/10.1007/s00004-024-00765-0">springer</a>
<p align="justify"> <i id="abm_abs">The principal curvature (PC) of a freeform surface, as an important indicator of its fundamental features, is frequently used to guide their rationalization in the field of architectural geometry. The division of a surface using its PC lines into principal strips (PSs) is an innovative way to break down a freeform surface for construction. However, the application of PC networks in architectural design is hindered by the difficulty to generate them and flexibly control their density. This paper introduces a method for PS-based reconstruction of freeform surfaces with different umbilical conditions in the early stages of design. An agent-based modeling approach is developed to find the umbilics and increase the degree of control over the spacing of PC lines. This research can effectively expand the application range of PS-based surface reconstruction methods for freeform architectures.</i></p>
<pre xml:space="preserve">
@article{Chai_2024,
title={Agent-Based Principal Strips Modeling for Freeform Surfaces in Architecture},
volume={26},
ISSN={1522-4600},
url={http://dx.doi.org/10.1007/s00004-024-00765-0},
DOI={10.1007/s00004-024-00765-0},
number={2},
journal={Nexus Network Journal},
publisher={Springer Science and Business Media LLC},
author={Chai, Hua and Orozco, Luis and Kannenberg, Fabian and Siriwardena, Lasath and Schwinn, Tobias and Liu, Hanning and Menges, Achim and Yuan, Philip F.},
year={2024},
month=feb,
pages={369–396}
}
</pre>
</div>
</td>
</tr>
<tr>
<td width="40%" valign="top" align="center"><a href="https://kns.cnki.net/kcms2/article/abstract?v=vRsBqZf6HxjLKOrcyv2YrReo6Kcw30US5S907H_rh3HiNOQCQc4ogzN6QfA1ui3Tohn2LpM59L61pOhZovLu_YBc-zQBgnRQYvNxed8gCqSzeFhbAVcjTvm9sJ5wjJKen-PY7APz1us=&uniplatform=NZKPT&flag=copy">
<img src="images/sibp/sibp.png" alt="sym" width="90%" style="padding-top:0px; padding-bottom:0px; border-radius:15px; height: auto;">
</a></td>
<td width="60%" valign="top">
<p><a href="https://kns.cnki.net/kcms2/article/abstract?v=vRsBqZf6HxjLKOrcyv2YrReo6Kcw30US5S907H_rh3HiNOQCQc4ogzN6QfA1ui3Tohn2LpM59L61pOhZovLu_YBc-zQBgnRQYvNxed8gCqSzeFhbAVcjTvm9sJ5wjJKen-PY7APz1us=&uniplatform=NZKPT&flag=copy" id="SIBP">
<heading>Spatial Implementation of Behavioral Performance: Tourist Center of Shanghai Xuhui West Coast</heading></a><br>
Philip F. Yuan(<span>✉</span>), Hanning Liu<br>
2024<br>
</p>
<div class="paper" id="sibp">
<a href="data/sibp.pdf">pdf</a> |
<a href="javascript:toggleblock('sibp_abs')">abstract</a> |
<a shape="rect" href="javascript:togglebib('sibp')" class="togglebib">bibtex</a> |
<a href="https://kns.cnki.net/kcms2/article/abstract?v=vRsBqZf6HxjLKOrcyv2YrReo6Kcw30US5S907H_rh3HiNOQCQc4ogzN6QfA1ui3Tohn2LpM59L61pOhZovLu_YBc-zQBgnRQYvNxed8gCqSzeFhbAVcjTvm9sJ5wjJKen-PY7APz1us=&uniplatform=NZKPT&flag=copy">cnki</a>
<p align="justify"> <i id="sibp_abs"><正>上海徐汇西岸三港线游客集散中心是同济大学建筑设计研究院和创盟国际建筑设计有限公司的设计作品,总建筑面积为12 110 m2,其中地上两层4 310 m2,地下一层7 800 m2。该项目是对老渡口建筑品质的提升与重建。老渡口位于徐汇西岸进木港南侧,据称进木港因做木材水上运输而得名,连结着对岸的浦东三林,故而人们称该航线为“三港线”。</i></p>
<pre xml:space="preserve">
@article{ZWJC202401002,
author = {袁烽 and 刘函宁},
title = {行为性能化的空间实现——上海徐汇西岸三港线游客集散中心},
journal = {中外建筑},
volume = {},
number = {01},
pages = {8-13},
year = {2024},
issn = {1008-0422},
doi ={10.19940/j.cnki.1008-0422.2024.01.002}
}
</pre>
</div>
</td>
</tr>
<tr>
<td width="40%" valign="top" align="center">
<a href="https://kns.cnki.net/kcms2/article/abstract?v=1ya23wS0yuCaEf8Dr7d03w-LbR8rpDoJ2ModU8oK5W1PsRYBbZpkGqLMfUc2Fz8kcaGqYNx4G_u_i1KVHycaT1KeuXejC6PRm3svSemmWnE6i6fl46HA5UjE7joH6J6bN4uFyfaCQ8Y=&uniplatform=NZKPT&flag=copy">
<img src="images/prompt_survey/prompt_survey.png" alt="sym" width="90%" style="padding-top:0px; padding-bottom:0px; border-radius:15px; height: auto;">
</a>
</td>
<td width="60%" valign="top">
<p><a href="https://kns.cnki.net/kcms2/article/abstract?v=1ya23wS0yuCaEf8Dr7d03w-LbR8rpDoJ2ModU8oK5W1PsRYBbZpkGqLMfUc2Fz8kcaGqYNx4G_u_i1KVHycaT1KeuXejC6PRm3svSemmWnE6i6fl46HA5UjE7joH6J6bN4uFyfaCQ8Y=&uniplatform=NZKPT&flag=copy" id="PROMPT_SURVEY">
<heading>Prompt Writing Approach in GAI Tools Aided Architectural Design: Taking Urban Camp Center Design as an Example</heading></a><br>
Hanning Liu, Hao Wu, Xingjie Xie, Menghao Yuan, Philip F. Yuan(<span>✉</span>)<br>
2023<br>
</p>
<div class="paper" id="prompt_survey">
<a href="data/prompt_survey.pdf">pdf</a> |
<a href="javascript:toggleblock('prompt_survey_abs')">abstract</a> |
<a shape="rect" href="javascript:togglebib('prompt_survey')" class="togglebib">bibtex</a> |
<a href="https://kns.cnki.net/kcms2/article/abstract?v=1ya23wS0yuCaEf8Dr7d03w-LbR8rpDoJ2ModU8oK5W1PsRYBbZpkGqLMfUc2Fz8kcaGqYNx4G_u_i1KVHycaT1KeuXejC6PRm3svSemmWnE6i6fl46HA5UjE7joH6J6bN4uFyfaCQ8Y=&uniplatform=NZKPT&flag=copy">cnki</a>
<p align="justify"> <i id="prompt_survey_abs">Recently, Generative Artificial Intelligence (GAI) tools combine diffusion models with prompt engineering, which can quickly generate architectural images by inputting text or images, thereby improving the work efficiency of architects. However, many architects often do not get results that match their design intent when using such tools. Therefore, the purpose of this study is to propose a systematic method of prompt writing to help users obtain results that are more in line with their design intentions. First, this paper introduces the development history of GAI technology, the concept definition of Artificial Intelligence Generated Content (AIGC), and four image-based GAI tools. Secondly, this paper adopts the method of controlled experiment, setting up 4 control groups, inputting the same text and images as prompt into the four tools respectively, and analyzing the image results generated by each tool with the consistency evaluation method of FID. Finally, this paper selects Stable Diffusion as a tool, and takes the design of urban camp center as an example to explore its role in the architectural design process, showing a set of prompt writing process in three stages: "collecting design references", "combining artificial intelligence and human conception to process a large amount of image information", and "completing the text prompt writing with reference to the architectural terminology table". In general, this paper provides a systematic prompt writing methods, so that the GAI tool can output the results that meet the architect's design intention, which greatly helps the architect to improve work efficiency.</i></p>
<pre xml:space="preserve">
@conference{JZSJ202310001091,
author = {刘函宁 and 吴昊 and 谢星杰 and 袁梦豪 and 袁烽},
title = {生成式人工智能工具辅助建筑设计中的提示词撰写方法研究——以城市露营地设计为例},
booktitle = {兴数育人 引智筑建:2023全国建筑院系建筑数字技术教学与研究学术研讨会论文集},
year = {2023},
editor = {},
pages = {433-436},
publisher = {华中科技大学出版社}
}
</pre>
</div>
</td>
</tr>
</table>
<table width="100%" align="center" border="0" cellspacing="0" cellpadding="10">
<tr><td><sectionheading> Software</sectionheading></td></tr>
</table>
<table width="100%" align="center" border="0" cellspacing="0" cellpadding="15">
<tr>
<td width="40%" valign="top" align="center">
<a href="https://www.food4rhino.com/en/app/marker-based-ar">
<img src="images/gh-plugin-ar/image.png" alt="sym" width="100%" style="padding-top:0px; padding-bottom:0px; border-radius:15px; height: auto;">
</a>
</td>
<td width="60%" valign="top">
<p><a href="https://www.food4rhino.com/en/app/marker-based-ar" id="AUTOCOST">
<heading>Fiducial marker-based AR-assisted assembly plugin for Grasshopper</heading></a><br>
</p>
<div class="paper" id="autocost">
<a href="https://github.com/Hanning-Liu/Fiducial_marker_based_AR_assisted_assembly_plugin_for_Grasshopper"> Source Code</a> |
<a href="https://www.food4rhino.com/en/app/marker-based-ar"> Food4Rhino</a>
<p align="justify"> <i id="wkfg_abs">A Grasshopper plugin that can overlay the image from virtual and physical camera to guide assembly.</i></p>
</div>
</td>
</tr>
</table>
<table width="100%" align="center" border="0" cellspacing="0" cellpadding="20">
<tbody>
<tr>
<td style="padding:0px">
<br>
<br>
<div>
<script type='text/javascript' id='clustrmaps' src='//cdn.clustrmaps.com/map_v2.js?cl=080808&w=300&t=tt&d=com1W8U51qThmXjGaivgzobZsac2agr2A12iWlaAm_k&co=ffffff&cmo=3acc3a&cmn=ff5353&ct=808080'></script>
</div>
</td>
</tr>
</tbody>
</table>
<hr/>
<table width="100%" align="center" border="0" cellspacing="0" cellpadding="2">
<tr><td><br><p align="right">
Website template from <a href="http://www.cs.berkeley.edu/~barron/">here</a>, <a href="http://www.cs.cmu.edu/~dpathak/">here</a> and <a href="https://tairanhe.com/">here</a>
</p></td></tr>
</table>
</td></tr>
</table>
<script xml:space="preserve" language="JavaScript">
hideallbibs();
</script>
<script xml:space="preserve" language="JavaScript">
hideblock('material_review_abs');
</script>
<script xml:space="preserve" language="JavaScript">
hideblock('ieee_iot_abs');
</script>
<script xml:space="preserve" language="JavaScript">
hideblock('acm_turc_abs');
</script>
<script xml:space="preserve" language="JavaScript">
hideblock('aog_mcts_abs');
</script>
<script xml:space="preserve" language="JavaScript">
hideblock('pragmatics_marl_abs');
</script>
<script xml:space="preserve" language="JavaScript">
hideblock('collab_marl_abs');
</script>
<script xml:space="preserve" language="JavaScript">
hideblock('rma_abs');
</script>
<script xml:space="preserve" language="JavaScript">
hideblock('energyloco_abs');
</script>
<script xml:space="preserve" language="JavaScript">
hideblock('navloco_abs');
</script>
<script xml:space="preserve" language="JavaScript">
hideblock('bfxr_abs');
</script>
<script xml:space="preserve" language="JavaScript">
hideblock('mcar_abs');
</script>
<script xml:space="preserve" language="JavaScript">
hideblock('abm_abs');
</script>
<script xml:space="preserve" language="JavaScript">
hideblock('sibp_abs');
</script>
<script xml:space="preserve" language="JavaScript">
hideblock('prompt_survey_abs');
</script>
<script xml:space="preserve" language="JavaScript">
hideblock('patchail_abs');
</script>
<script xml:space="preserve" language="JavaScript">
hideblock('sisos_abs');
</script>
<script xml:space="preserve" language="JavaScript">
hideblock('uaissa_abs');
</script>
<script xml:space="preserve" language="JavaScript">
hideblock('autocost_abs');
</script>
<script xml:space="preserve" language="JavaScript">
hideblock('a2ls_abs');
</script>
<script xml:space="preserve" language="JavaScript">
hideblock('issa_abs');
</script>
<script xml:space="preserve" language="JavaScript">
hideblock('ebil_abs');
</script>
</script>
<script xml:space="preserve" language="JavaScript">
hideblock('maniploco_abs');
</script>
<script xml:space="preserve" language="JavaScript">
hideblock('parkour_abs');
</script>
<script xml:space="preserve" language="JavaScript">
hideblock('mobile_aloha_abs');
</script>
</body>
</html>