<!DOCTYPE html><html lang="en"><head><meta charSet="utf-8"/><meta name="viewport" content="width=device-width"/><title>OCL</title><meta content="O.C.L" name="description"/><link href="https://mvig-rhos.com/ocl" rel="canonical"/><link href="/favicon.ico" rel="icon" sizes="any"/><link href="/icon.svg" rel="icon" type="image/svg+xml"/><link href="/apple-touch-icon.png" rel="apple-touch-icon"/><link href="/site.webmanifest" rel="manifest"/><meta content="OCL" property="og:title"/><meta content="O.C.L" property="og:description"/><meta content="https://mvig-rhos.com/ocl" property="og:url"/><meta content="OCL" name="twitter:title"/><meta content="O.C.L" name="twitter:description"/><meta name="next-head-count" content="14"/><meta charSet="utf-8"/><meta content="notranslate" name="google"/><link rel="preload" href="/_next/static/css/144e01f988e2575a.css" as="style"/><link rel="stylesheet" href="/_next/static/css/144e01f988e2575a.css" data-n-g=""/><noscript data-n-css=""></noscript><script defer="" nomodule="" src="/_next/static/chunks/polyfills-c67a75d1b6f99dc8.js"></script><script src="/_next/static/chunks/webpack-aa92048316e96af3.js" defer=""></script><script src="/_next/static/chunks/framework-7751730b10fa0f74.js" defer=""></script><script src="/_next/static/chunks/main-a9b50f256c2cfb57.js" defer=""></script><script src="/_next/static/chunks/pages/_app-18e1e0f85fa3b58e.js" defer=""></script><script src="/_next/static/chunks/526-2e1a63ee81ce1f73.js" defer=""></script><script src="/_next/static/chunks/342-08dd1fd52b1d6335.js" defer=""></script><script src="/_next/static/chunks/675-717e3cc8fb67a947.js" defer=""></script><script src="/_next/static/chunks/345-b3e1ef088eb31a90.js" defer=""></script><script src="/_next/static/chunks/pages/ocl-23f52d3a9fdc6529.js" defer=""></script><script src="/_next/static/27sOB7OcKYXLbYhtS4T1I/_buildManifest.js" defer=""></script><script src="/_next/static/27sOB7OcKYXLbYhtS4T1I/_ssgManifest.js" defer=""></script></head><body class="bg-neutral-900"><div id="__next"><button aria-label="Menu Button" class="fixed top-2 right-2 z-40 rounded-md bg-orange-500 p-2 ring-offset-gray-800/60 hover:bg-orange-400 focus:outline-none focus:ring-0 focus-visible:ring-2 focus-visible:ring-orange-500 focus-visible:ring-offset-2 sm:hidden"><svg xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 24 24" stroke-width="2" stroke="currentColor" aria-hidden="true" class="h-8 w-8 text-white"><path stroke-linecap="round" stroke-linejoin="round" d="M4 6h16M4 12h16m-7 6h7"></path></svg><span class="sr-only">Open sidebar</span></button><header class="fixed top-0 z-50 hidden w-full bg-neutral-900/50 p-4 backdrop-blur sm:block" id="headerNav"><nav class="flex justify-center gap-x-8"><a class="-m-1.5 p-1.5 rounded-md font-bold first-letter:uppercase hover:transition-colors hover:duration-300 focus:outline-none focus-visible:ring-2 focus-visible:ring-orange-500 sm:hover:text-orange-500 text-neutral-100 text-neutral-100" href="/">Home</a><a class="-m-1.5 p-1.5 rounded-md font-bold first-letter:uppercase hover:transition-colors hover:duration-300 focus:outline-none focus-visible:ring-2 focus-visible:ring-orange-500 sm:hover:text-orange-500 text-neutral-100 text-neutral-100" href="/ocl#about">about</a><a class="-m-1.5 p-1.5 rounded-md font-bold first-letter:uppercase hover:transition-colors hover:duration-300 focus:outline-none focus-visible:ring-2 focus-visible:ring-orange-500 sm:hover:text-orange-500 text-neutral-100 text-neutral-100" href="/ocl#demo">demo</a><a class="-m-1.5 
p-1.5 rounded-md font-bold first-letter:uppercase hover:transition-colors hover:duration-300 focus:outline-none focus-visible:ring-2 focus-visible:ring-orange-500 sm:hover:text-orange-500 text-neutral-100 text-neutral-100" href="/ocl#news">news</a><a class="-m-1.5 p-1.5 rounded-md font-bold first-letter:uppercase hover:transition-colors hover:duration-300 focus:outline-none focus-visible:ring-2 focus-visible:ring-orange-500 sm:hover:text-orange-500 text-neutral-100 text-neutral-100" href="/ocl#download">download</a><a class="-m-1.5 p-1.5 rounded-md font-bold first-letter:uppercase hover:transition-colors hover:duration-300 focus:outline-none focus-visible:ring-2 focus-visible:ring-orange-500 sm:hover:text-orange-500 text-neutral-100 text-neutral-100" href="/ocl#publications">publications</a></nav></header><div class="relative flex h-screen-no w-screen items-center justify-center bg-neutral-100"><div class="flex flex-col z-10 w-full max-w-screen-lg p-4 lg:px-0 items-center text-center "><div class="h-20"></div><h1 class="text-3xl font-bold text-gray-800 sm:text-4xl lg:text-5xl p-4">OCL: Object Concept Learning</h1><p class="text-gray-600 text-2xl">MVIG-RHOS, SJTU</p></div></div><section class="bg-neutral-100 px-4 py-8 md:py-12 lg:px-8" id="about"><div class="mx-auto max-w-screen-lg"><div class="flex flex-col"><div class="grid justify-items-center pb-8"><div class="w-3/4"><span style="box-sizing:border-box;display:inline-block;overflow:hidden;width:initial;height:initial;background:none;opacity:1;border:0;margin:0;padding:0;position:relative;max-width:100%"><span style="box-sizing:border-box;display:block;width:initial;height:initial;background:none;opacity:1;border:0;margin:0;padding:0;max-width:100%"><img style="display:block;max-width:100%;width:initial;height:initial;background:none;opacity:1;border:0;margin:0;padding:0" alt="" aria-hidden="true" src="data:image/svg+xml,%3csvg%20xmlns=%27http://www.w3.org/2000/svg%27%20version=%271.1%27%20width=%273327%27%20height=%27732%27/%3e"/></span><img alt="demo" src="data:image/gif;base64,R0lGODlhAQABAIAAAAAAAP///yH5BAEAAAAALAAAAAABAAEAAAIBRAA7" decoding="async" data-nimg="intrinsic" class="place-self-center" style="position:absolute;top:0;left:0;bottom:0;right:0;box-sizing:border-box;padding:0;border:none;margin:auto;display:block;width:0;height:0;min-width:100%;max-width:100%;min-height:100%;max-height:100%"/><noscript><img alt="demo" src="/_next/static/media/oclTeaser.43b8f595.png" decoding="async" data-nimg="intrinsic" style="position:absolute;top:0;left:0;bottom:0;right:0;box-sizing:border-box;padding:0;border:none;margin:auto;display:block;width:0;height:0;min-width:100%;max-width:100%;min-height:100%;max-height:100%" class="place-self-center" loading="lazy"/></noscript></span></div></div><div>Understanding objects is a central building block of artificial intelligence, especially for embodied AI. Even though object recognition excels with deep learning, current machines still struggle to learn higher-level knowledge, e.g., what attributes an object has and what we can do with it. In this work, we propose a challenging Object Concept Learning (OCL) task to push the envelope of object understanding. It requires machines to reason out object affordances and simultaneously give the reason: what attributes make an object possess these affordances. 
To support OCL, we build a densely annotated knowledge base including extensive labels for three levels of object concepts: categories, attributes, and affordances, together with their causal relations. By analyzing the causal structure of OCL, we present a strong baseline, Object Concept Reasoning Network (OCRN). It leverages causal intervention and concept instantiation to infer the three levels following their causal relations.</div></div></div></section><section class="bg-neutral-100 px-4 py-8 md:py-12 lg:px-8" id="demo"><div class="mx-auto max-w-screen-lg"><div class="grid grid-cols-1 gap-y-4 py-8 first:pt-0 last:pb-0 md:grid-cols-4"><div class="col-span-1 flex justify-center md:justify-start"><div class="relative h-max"><h2 class="text-xl font-bold uppercase text-neutral-800">Demo</h2><span class="absolute inset-x-0 border-b-2 border-orange-400"></span><div></div></div></div><div class="col-span-1 flex flex-col md:col-span-3"><div class="flex flex-col"><video autoplay="" loop="" controls=""><source src="/media/demo_small.mp4" type="video/mp4"/></video><div class="grid grid-cols-2 pt-6 gap-1"><p> <b>Left-top</b>: object (in yellow box) </p><p> <b>Right-top</b>: key causal graph </p><p> <b>Left-bottom</b>: affordance prediction score </p><p> <b>Right-bottom</b>: key causal relations </p><p> <a class="underline text-sky-600" href="https://youtu.be/NTyJmTzhfkE">Full demo on YouTube</a></p><p> <a class="underline text-sky-600" href="https://www.bilibili.com/video/BV1Vm4y1V7aC/?share_source=copy_web&vd_source=33c221d66435cf014ff6a86a1ddd62b8">Full demo on BiliBili</a></p></div></div></div></div></div></section><section class="bg-neutral-100 px-4 py-8 md:py-12 lg:px-8" id="news"><div class="mx-auto max-w-screen-lg"><div class="grid grid-cols-1 gap-y-4 py-8 first:pt-0 last:pb-0 md:grid-cols-4"><div class="col-span-1 flex justify-center md:justify-start"><div class="relative h-max"><h2 class="text-xl font-bold uppercase text-neutral-800">News and Olds</h2><span class="absolute inset-x-0 border-b-2 border-orange-400"></span><div></div></div></div><div class="col-span-1 flex flex-col md:col-span-3"><div class="flex flex-col"><div class="pb-2"><span class="flex-1 font-bold sm:flex-none">[<!-- -->2023.11<!-- -->] </span><span class="flex-1 sm:flex-none">We release the code and data of OCL on <a class="underline text-sky-600" href="https://github.com/silicx/ObjectConceptLearning">GitHub</a>.</span></div><div class="pb-2"><span class="flex-1 font-bold sm:flex-none">[<!-- -->2023.07<!-- -->] </span><span class="flex-1 sm:flex-none">OCL will appear at ICCV 2023.</span></div><div class="pb-2"><span class="flex-1 font-bold sm:flex-none">[<!-- -->2022.12<!-- -->] </span><span class="flex-1 sm:flex-none">Our preprint paper is available on <a class="underline text-sky-600" href="https://arxiv.org/abs/2212.02710">arXiv</a>.</span></div></div></div></div></div></section><section class="bg-neutral-100 px-4 py-8 md:py-12 lg:px-8" id="download"><div class="mx-auto max-w-screen-lg"><div class="grid grid-cols-1 gap-y-4 py-8 first:pt-0 last:pb-0 md:grid-cols-4"><div class="col-span-1 flex justify-center md:justify-start"><div class="relative h-max"><h2 class="text-xl font-bold uppercase text-neutral-800">Download</h2><span class="absolute inset-x-0 border-b-2 border-orange-400"></span><div></div></div></div><div class="col-span-1 flex flex-col md:col-span-3"><div class="flex flex-col"><div><p>Our code and full data are available on <a class="underline text-sky-600" 
href="https://github.com/silicx/ObjectConceptLearning">GitHub</a>.<br/></p></div></div></div></div></div></section><section class="bg-neutral-100 px-4 py-8 md:py-12 lg:px-8" id="publications"><div class="mx-auto max-w-screen-lg"><div class="grid grid-cols-1 gap-y-4 py-8 first:pt-0 last:pb-0 md:grid-cols-4"><div class="col-span-1 flex justify-center md:justify-start"><div class="relative h-max"><h2 class="text-xl font-bold uppercase text-neutral-800">Publications</h2><span class="absolute inset-x-0 border-b-2 border-orange-400"></span><div></div></div></div><div class="col-span-1 flex flex-col md:col-span-3"><div class="flex flex-col divide-y-2"><div>To use our data and code in your project, please cite:</div><div class="text-sm bg-neutral-300 p-2"><pre><code>@inproceedings{li2023beyond,
  title={Beyond Object Recognition: A New Benchmark towards Object Concept Learning},
  author={Li, Yong-Lu and Xu, Yue and Xu, Xinyu and Mao, Xiaohan and Yao, Yuan and Liu, Siqi and Lu, Cewu},
  booktitle={Proceedings of the IEEE/CVF International Conference on Computer Vision},
  pages={20029--20040},
  year={2023}
}</code></pre></div></div></div></div></div></section><section class="bg-neutral-100 px-4 py-8 md:py-12 lg:px-8" id="disclaimer"><div class="mx-auto max-w-screen-lg"><div class="grid grid-cols-1 gap-y-4 py-8 first:pt-0 last:pb-0 md:grid-cols-4"><div class="col-span-1 flex justify-center md:justify-start"><div class="relative h-max"><h2 class="text-xl font-bold uppercase text-neutral-800">Disclaimer</h2><span class="absolute inset-x-0 border-b-2 border-orange-400"></span><div></div></div></div><div class="col-span-1 flex flex-col md:col-span-3"><div class="flex flex-col divide-y-4"><p><a rel="license" href="http://creativecommons.org/licenses/by-nc/4.0/"><img alt="Creative Commons License" style="border-width:0" src="https://i.creativecommons.org/l/by-nc/4.0/88x31.png"/></a><br/>This work is licensed under a <a class="text-sky-600" rel="license" href="http://creativecommons.org/licenses/by-nc/4.0/">Creative Commons Attribution-NonCommercial 4.0 International License</a>.<br/>In our database, 75,578 images and their annotations are extracted from existing datasets (COCOa, ImageNet-150K, aPY, SUN). 4,885 images are from the Internet. We only provide image links for <b>research</b> purposes.</p></div></div></div></div></section><div class="relative bg-neutral-900 px-4 pb-6 pt-12 sm:px-8 sm:pt-14 sm:pb-8"><div class="absolute inset-x-0 -top-4 flex justify-center sm:-top-6"><a class="rounded-full bg-neutral-100 p-1 ring-white ring-offset-2 ring-offset-gray-700/80 focus:outline-none focus:ring-2 sm:p-2" href="/#hero"><svg xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 24 24" stroke-width="2" stroke="currentColor" aria-hidden="true" class="h-6 w-6 bg-transparent sm:h-8 sm:w-8"><path stroke-linecap="round" stroke-linejoin="round" d="M5 15l7-7 7 7"></path></svg></a></div><div class="flex flex-col items-center gap-y-6"><div id="pageview-script" class="text-sm text-neutral-700"><a href="https://www.revolvermaps.com/livestats/5r1om30zfoi/"><img src="//rf.revolvermaps.com/h/m/a/0/ff0000/128/0/5r1om30zfoi.png" width="256" height="128" alt="Map" style="border:0"/></a><script type="text/javascript" id="clstr_globe" src="//clustrmaps.com/globe.js?d=ko7teOw_sX7QKyWbHLxkMdyOA6BYkSEu0Fo1wnSs9QE"></script></div><span class="text-sm text-neutral-700">© Copyright 2022 MVIG-RHOS • Based on<!-- --> <a href="https://github.com/tbakerx/react-resume-template">tbakerx</a></span></div></div></div><script id="__NEXT_DATA__" type="application/json">{"props":{"pageProps":{}},"page":"/ocl","query":{},"buildId":"27sOB7OcKYXLbYhtS4T1I","nextExport":true,"autoExport":true,"isFallback":false,"scriptLoader":[]}</script></body></html>