diff --git "a/CtE1T4oBgHgl3EQf9wbT/content/tmp_files/load_file.txt" "b/CtE1T4oBgHgl3EQf9wbT/content/tmp_files/load_file.txt" new file mode 100644--- /dev/null +++ "b/CtE1T4oBgHgl3EQf9wbT/content/tmp_files/load_file.txt" @@ -0,0 +1,2143 @@ +filepath=/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf,len=2142 +page_content='IEEE INTERNET OF THINGS JOURNAL 1 Ancilia: Scalable Intelligent Video Surveillance for the Artificial Intelligence of Things Armin Danesh Pazho∗,' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content=' Student Member,' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content=' IEEE,' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content=' Christopher Neff∗,' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content=' Student Member,' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content=' IEEE,' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content=' Ghazal Alinezhad Noghre,' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content=' Student Member,' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content=' IEEE,' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content=' Babak Rahimi Ardabili,' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content=' Student Member,' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content=' IEEE,' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content=' Shanle Yao,' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content=' Mohammadreza Baharani,' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content=' Member,' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content=' IEEE,' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content=' Hamed Tabkhi,' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content=' Member,' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content=' IEEE Abstract—With the advancement of vision-based artificial in- telligence,' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content=' the proliferation of the Internet of Things connected cameras,' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} 
+page_content=' and the increasing societal need for rapid and eq- uitable security,' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content=' the demand for accurate real-time intelligent surveillance has never been higher.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content=' This article presents Ancilia, an end-to-end scalable, intelligent video surveillance system for the Artificial Intelligence of Things.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content=' Ancilia brings state-of-the- art artificial intelligence to real-world surveillance applications while respecting ethical concerns and performing high-level cognitive tasks in real-time.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content=' Ancilia aims to revolutionize the surveillance landscape, to bring more effective, intelligent, and equitable security to the field, resulting in safer and more secure communities without requiring people to compromise their right to privacy.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content=' Index Terms—Surveillance, artificial intelligence, IoT, com- puter vision, application, real-world, real-time, edge, anomaly.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content=' I.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content=' INTRODUCTION There is a growing need for effective and efficient surveil- lance technologies that can be deployed to protect our cities, people, and infrastructure.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content=' For example, in Itaewon, South Korea, a holiday celebration left over 150 dead due to severe overcrowding, with many blaming the tragedy on careless government oversight [1].' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content=' In Moore County, North Carolina, directed attacks against two power substations left over 45,000 residents without power for days as technicians rushed to restore power and authorities struggled to find the source of the attacks [2].' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content=' With enough forewarning through smart video surveillance, they could have been prevented.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content=' With the recent emergence of the Artificial Intelligence of Things (AIoT), some surveillance solution providers have started adding basic forms of artificial intelligence to their systems.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content=' However, their methods are still naive and unable to enhance security in a truly meaningful way [3].' 
metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content=' This is because, while a lot of research is conducted on tasks that would benefit surveillance systems, most works focus on algorithmic improvements in a lab environment instead of paying attention to factors that are prevalent in real-world scenarios [4], [5].' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content=' Most research focuses on a single algorithm and how to tweak it to get the best possible results on readily available datasets that often do not reflect a real surveillance environment.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content=' Few works explore how different algorithms af- fect the performance of other downstream algorithms in multi- The authors are with the Electrical and Computer Engineering Department, The University of North Carolina at Charlotte, Charlotte, NC, 28223 USA.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content=' {adaneshp, cneff1, galinezh, brahimia, mbaharan, htabkhiv}@uncc.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content='edu ∗ Corresponding authors have equal contribution.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content=' algorithm systems.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content=' Few still explore the effects of noise (both data derived and the system produced) in end-to-end accuracy.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content=' Beyond this, real-world intelligent surveillance necessitates real-time performance.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content=' The cognitive abilities of advanced artificial intelligence are only helpful if they can be provided to security personnel quickly enough to take appropriate action before it is too late.' 
[Fig. 1. Conceptual overview of Ancilia: multiple edge sites (Edge0 through EdgeN-1), each consisting of cameras (C0 ... CN-1), local nodes (L0 ... LN-1), and a global node, connected to cloud service(s) and user device(s) (D0 ... DN-1).]

In this article, we present Ancilia, the first end-to-end scalable, intelligent video surveillance system able to perform high-level cognitive tasks in real time while achieving state-of-the-art results. Ancilia takes advantage of the prevalence of cameras in the Internet of Things (IoT) and uses localized servers and existing cameras, facilitating processing on the edge without the need for additional infrastructure upgrades. As shown in Fig. 1, Ancilia exists within three logical and physical segments: the edge, the cloud, and user devices. The edge runs a suite of advanced artificial intelligence algorithms on the data received from cameras to facilitate intelligent security. Using a single workstation to perform edge processing, Ancilia can monitor up to 4 cameras in real time at 30 FPS, or up to 8 cameras at 15 FPS, in scenarios with both medium and heavy crowd density. Ancilia performs high-level cognitive tasks (i.e., action recognition and anomaly detection) with ∼1% deviation in accuracy from the current state of the art (SotA).
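To make the per-camera edge workload concrete, the sketch below passes one camera's frames through a detection, tracking, and pose-estimation chain before handing the results to downstream cognitive tasks. All class names, interfaces, and the frame-budget numbers in the comments are illustrative assumptions for this sketch, not Ancilia's actual implementation.

```python
# Illustrative per-camera edge pipeline: detection -> tracking -> pose -> downstream.
# Stage internals are stubbed; the real models and interfaces are not shown here.
from dataclasses import dataclass, field
from typing import List

@dataclass
class Detection:
    box: tuple          # (x1, y1, x2, y2) in pixels
    confidence: float

@dataclass
class Track:
    track_id: int
    box: tuple
    keypoints: List[tuple] = field(default_factory=list)  # pose joints as (x, y)

class StubDetector:
    def __call__(self, frame) -> List[Detection]:
        # A real system would run a person detector here.
        return [Detection(box=(0, 0, 10, 20), confidence=0.9)]

class StubTracker:
    def __init__(self):
        self.next_id = 0
    def update(self, detections: List[Detection]) -> List[Track]:
        # A real tracker would associate detections with existing tracks.
        tracks = [Track(track_id=self.next_id + i, box=d.box)
                  for i, d in enumerate(detections)]
        self.next_id += len(detections)
        return tracks

class StubPoseEstimator:
    def __call__(self, frame, tracks: List[Track]) -> List[Track]:
        for t in tracks:
            t.keypoints = [(0.0, 0.0)] * 17  # e.g., COCO-style 17 joints
        return tracks

def process_stream(frames, detector, tracker, pose_estimator, downstream):
    """Run one camera's frames through the pipeline.

    At 30 FPS each camera leaves roughly 33 ms per frame for all stages;
    at 15 FPS the budget doubles to about 66 ms.
    """
    for frame in frames:
        detections = detector(frame)
        tracks = tracker.update(detections)
        tracks = pose_estimator(frame, tracks)
        downstream(tracks)   # e.g., action recognition / anomaly detection

if __name__ == "__main__":
    fake_frames = [object() for _ in range(3)]   # stand-ins for decoded frames
    process_stream(fake_frames, StubDetector(), StubTracker(),
                   StubPoseEstimator(), downstream=lambda t: print(len(t)))
```

The tight per-frame budget is why the accuracy of each stage must be weighed against its latency when scaling to multiple cameras per workstation.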
Ancilia is designed from the ground up to respect the privacy of the people and communities being surveilled. Ancilia does not store any personally identifiable information in any database and does not make use of invasive artificial intelligence techniques such as facial recognition or gait detection. Ancilia strictly provides pose and locational information for high-level tasks (i.e., action recognition and anomaly detection), as opposed to the identity information that is commonly used. Ancilia looks at what a person is doing, not who they are. This allows Ancilia to act as a buffer to help remove biases based on race, ethnicity, gender, age, and socio-economic factors, which can lead to a reduction in the unnecessary conflict between authorities and marginalized communities that has become increasingly problematic.

After data is processed on the edge, it is sent to the cloud for communication and service management with user devices. A mobile app allows user devices to receive data from the cloud, including alerts when potential security concerns arise.

In summary, this article has the following contributions:
- We present Ancilia, the first end-to-end scalable real-world intelligent video surveillance system capable of performing high-level cognitive tasks in real time while achieving SotA accuracy.
- We analyze the ethical concerns of intelligent video surveillance, both from a privacy and a fairness perspective, and illustrate how Ancilia’s design is purpose-built to address them.
- We perform an end-to-end empirical evaluation of Ancilia on two high-level cognitive tasks directly related to intelligent surveillance, action recognition and anomaly detection, investigating the trade-off in accuracy required to achieve real-time performance.
- We perform an exhaustive system-level evaluation of Ancilia’s real-time performance and scalability across different classes of hardware and increasing scenario intensities, showing how Ancilia is able to meet real-time intelligent security needs in different contexts.

II. RELATED WORK

There has been a plethora of research regarding the use of artificial intelligence for video surveillance [4], [6]–[8]. [9] proposes the use of region-proposal-based optical flow to suppress background noise and a bidirectional Bayesian state transition strategy to model motion uncertainty, enhancing spatio-temporal feature representations for the detection of salient objects in surveillance videos. [10] proposes the use of a person detector, a tracking algorithm, and a mask classifier for tracking pedestrians through surveillance video streams. In [4], it is determined that, in order to address the latency concerns of real-time video surveillance, a shift towards edge computing is needed. Nikouei et al. [11]–[13] explore the feasibility of using low-power edge devices to perform object detection and tracking in surveillance scenarios. They argue that, in the worst case, 5 FPS is sufficient throughput for tracking humans in surveillance applications, and as such computation can be pushed to the edge. However, their results show that even lightweight convolutional neural networks can prove problematic for low-power devices, often reducing throughput below the 5 FPS threshold. [14] proposes a system using low-power embedded GPUs to perform detection, tracking, path prediction, pose estimation, and multi-camera re-identification in a surveillance environment, while placing a focus on real-time execution and the privacy of tracked pedestrians. [15] proposes a similar system, focusing solely on object detection, tracking, and multi-camera re-identification to increase throughput.
[16] proposes using a combination of lightweight object detection models on the edge and more computationally expensive models in the cloud, splitting computation between the two to provide real-time video surveillance in a construction-site environment. [17] proposes the use of background detection, vehicle detection, and Kalman filter [18] based tracking for parking lot surveillance and determining lot occupancy (a minimal Kalman tracking example is sketched at the end of this section). [19] proposes a system that uses object detection, person tracking, scene segmentation, and joint trajectory and activity prediction for pedestrians in a surveillance setting.

The future of intelligent surveillance is heading towards systems able to perform high-level cognitive tasks. A recent survey focusing on real-world video surveillance [4] asserts that, while the domain of video surveillance comprises understanding stationary objects, vehicles, individuals, and crowds, the ability to determine when anomalous events occur is paramount for intelligent surveillance systems. Other research has supported this assertion [6]. [20] utilizes the Infinite Hidden Markov Model and Bayesian Nonparametric Factor Analysis to find patterns in video streams and detect abnormal events. [21] proposes active learning and fuzzy aggregation to continually learn what constitutes an anomaly over time, adapting to scenarios not seen in standard datasets. [22] proposes a system to detect suspicious behaviors in a mall surveillance setting, using lightweight algorithms such as segmentation, blob fusion, and Kalman filter based tracking [18]. AnomalyNet [23] is a recently proposed recurrent neural network with adaptive iterative hard-thresholding and long short-term memory that works directly off pixel information to eliminate background noise, capture motion, and learn a sparse representation and dictionary to perform anomaly detection in video surveillance.
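Several of the surveyed systems ([17], [19], [22]) build their trackers on the Kalman filter [18]. As a point of reference, a minimal constant-velocity predict/update step for a single tracked object looks roughly like the sketch below; this is a generic textbook formulation, not the implementation of any cited work.

```python
# Minimal constant-velocity Kalman filter for one tracked object (illustrative only).
import numpy as np

dt = 1.0                                    # time step between frames
F = np.array([[1, 0, dt, 0],                # state transition for [x, y, vx, vy]
              [0, 1, 0, dt],
              [0, 0, 1, 0],
              [0, 0, 0, 1]], dtype=float)
H = np.array([[1, 0, 0, 0],                 # we observe only position (x, y)
              [0, 1, 0, 0]], dtype=float)
Q = np.eye(4) * 1e-2                        # process noise covariance
R = np.eye(2) * 1.0                         # measurement noise covariance

x = np.zeros(4)                             # state estimate [x, y, vx, vy]
P = np.eye(4)                               # state covariance

def predict(x, P):
    """Propagate the state one frame forward under the constant-velocity model."""
    return F @ x, F @ P @ F.T + Q

def update(x, P, z):
    """Correct the prediction with a matched detection's position z = (x, y)."""
    innovation = z - H @ x
    S = H @ P @ H.T + R                     # innovation covariance
    K = P @ H.T @ np.linalg.inv(S)          # Kalman gain
    return x + K @ innovation, (np.eye(4) - K @ H) @ P

# Feed three noisy position observations of one pedestrian through the filter.
for z in [np.array([10.0, 5.0]), np.array([11.1, 5.4]), np.array([12.0, 6.1])]:
    x, P = predict(x, P)
    x, P = update(x, P, z)
print("estimated [x, y, vx, vy]:", x)
```

The appeal of this formulation in the cited systems is its low cost: each track requires only a few small matrix multiplications per frame, which is why it remains common on resource-constrained surveillance hardware.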
III. ETHICAL CONCERNS

Video surveillance has always been associated with social and ethical concerns, whether in its traditional form or in more recent intelligent formats. Respecting citizens’ privacy and autonomy while improving public safety and security is among the most well-known and enduring ethical issues in this context [24]–[27]. Developing a successful smart video surveillance solution that addresses the public safety problem and engages the community up to a certain level is only possible by considering these concerns. There is rising attention among scholars to the issue of incorporating privacy concerns at the design level, referred to as “privacy by design” [28]. The source of discrimination and privacy violation in many data-driven and AI-based systems, such as smart video surveillance technology, is the use of Personally Identifiable Information (PII) [29], [30]. Using PII, such as actual footage of people’s daily activities, at any stage of the technology can increase the risk of privacy violation. There is a long-standing debate on the ethical challenges of using facial recognition technologies in different sectors and how their use can result in privacy violations [31]–[34]. Avoiding facial recognition technologies does not, however, guarantee that a system is entirely privacy preserving. Storing images of pedestrians is another source of ethical violation.
Fig. 2. Ancilia algorithmic details. N local nodes are connected to a single global node on the edge. The final analyses are transferred to the cloud node to feed the application on the user device. Multiple edges may be connected to the cloud, though this figure only shows one edge for clarity. BBP, BBO, IDL, P, C, FL, D, FD, IDG, I, SA, R, and AR refer to bounding boxes for pedestrians, bounding boxes of objects, local identities, poses, person crops that passed selection, features from the local node, data from the downstream tasks, features from the database, global identities, information from the database, completed statistical analysis, requests from users, and requested attributes, respectively.

From the discrimination perspective, using any form of PII can contribute to the issue of marginalization in policing systems [35]. Therefore, an essential step in designing a non-discriminatory system is to ensure that the system does not depend on PII. This requires a deliberate approach to the design of such technology in the choice of algorithms, the type of data used, and how that data is stored.
Ancilia addresses this by neither storing PII nor sending it across the network; such data is destroyed immediately after use. Ancilia utilizes pose-based methods for all high-level cognitive tasks, ensuring that no PII is ever used in those algorithms. This allows such processing to occur without any potential for gender, ethnicity, or class-based discrimination. As such, Ancilia addresses the ethical challenge of privacy in smart video surveillance systems while also addressing the ethical issue of discrimination.

IV. ANCILIA ALGORITHMIC FRAMEWORK
The algorithmic core of Ancilia is separated into two conceptual systems: the local nodes, which contain the algorithmic pipeline of each camera, and the global node, which handles all processing that requires understanding across multiple camera perspectives. Together, these two systems form the basis on which all higher-level understanding is achieved. A visual representation of this algorithmic core can be seen in Fig. 2.

A. Single Camera Vision Pipeline
As seen in Fig. 2, the local algorithmic pipeline starts when an image is extracted from the camera. The image is first run through an object detector to locate people, vehicles, animals, and other important objects in the scene. This is important not only because it acts as the basis for the rest of the algorithmic pipeline, but also because it can be used for basic situational awareness.
Sometimes the mere presence of a certain object in a scene is noteworthy, such as a person in an unauthorized location, a bag left unattended, or a firearm. Ancilia uses YOLOv5 [36] for this purpose (though any detector could be used in its place). The location coordinates of detected persons are sent to a tracker, which builds tracklets by matching each person with their detections in prior frames; Ancilia uses ByteTrack [37]. Tracking makes it possible to understand how a person moves through a scene, which is vital for many surveillance applications, and it allows Ancilia to associate poses with persons over time, which is essential for the high-level tasks that provide much-needed situational awareness. Image crops of the detected people are also sent to a human pose estimator, which produces two-dimensional pose skeletons; Ancilia uses HRNet [38] to extract these 2D skeletons. Using pose data for higher-level tasks has two major benefits over using raw pixel data. First, pose data has much lower dimensionality than pixel data, making it far less computationally expensive to process and allowing Ancilia to function in real time. Second, pose data contains no identifiable information, making it impossible for high-level tasks to form unintended biases based on ethnicity, gender, age, or other identity-based attributes.
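To make this per-frame data flow concrete, the sketch below shows how a local node's loop could be organized. It is a minimal illustration rather than the Ancilia implementation: the detect, track, and estimate_pose callables are hypothetical wrappers standing in for YOLOv5 [36], ByteTrack [37], and HRNet [38], and the person class index and confidence threshold are assumed values.

    from dataclasses import dataclass, field
    from typing import Callable, List, Sequence

    PERSON_CLASS = 0        # assumed COCO-style class index for "person"
    MIN_CONFIDENCE = 0.5    # assumed detection-confidence threshold

    @dataclass
    class Detection:
        box: Sequence[float]   # (x1, y1, x2, y2) in pixel coordinates
        score: float
        cls: int

    @dataclass
    class TrackedPerson:
        track_id: int          # local identity (IDL in Fig. 2)
        box: Sequence[float]
        pose: List[Sequence[float]] = field(default_factory=list)  # 2D keypoints

    def process_frame(frame,                    # H x W x 3 image array from the camera
                      detect: Callable,         # frame -> List[Detection], e.g. a YOLOv5 wrapper
                      track: Callable,          # List[Detection] -> List[TrackedPerson], e.g. ByteTrack
                      estimate_pose: Callable,  # person crop -> list of 2D keypoints, e.g. HRNet
                      ) -> List[TrackedPerson]:
        """One iteration of the local-node pipeline: detect, track, estimate pose."""
        # Keep only confident person detections; other classes still matter for
        # situational awareness but are not tracked or pose-estimated here.
        persons = [d for d in detect(frame)
                   if d.cls == PERSON_CLASS and d.score >= MIN_CONFIDENCE]
        tracked = track(persons)               # associate detections with prior tracklets
        for person in tracked:
            x1, y1, x2, y2 = (int(v) for v in person.box)
            crop = frame[y1:y2, x1:x2]          # pixel crop handed to the pose estimator
            person.pose = estimate_pose(crop)   # only the 2D skeleton moves downstream
        return tracked

The design point reflected here is that only bounding boxes, local identities, and 2D keypoints move downstream, consistent with the system's decision to discard PII after use.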
B. Multi-Camera Person Re-identification

While the tracker follows people within a single camera, locational information alone cannot accurately re-identify a person across multiple cameras. For this, the same person crops that are sent to the human pose estimator are also sent to a person re-identification feature extractor, which creates an abstract feature representation for each person. Only one feature representation is created per person over a period of 30 frames, and only when the quality of the representation can be assured, since poor-quality representations are detrimental to accurate multi-camera person re-identification. Ancilia uses a feature representation filtering algorithm to verify two qualities
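The 30-frame interval and quality gating described above might look roughly like the following sketch. The quality criteria themselves are not spelled out in this excerpt, so passes_quality_check is a hypothetical placeholder for Ancilia's filtering algorithm, and extract_feature stands in for the re-identification backbone.

from typing import Callable, Dict, List, Optional
import numpy as np

FEATURE_INTERVAL = 30   # at most one representation per person every 30 frames

def maybe_extract_feature(
    frame_idx: int,
    track_id: int,
    crop: np.ndarray,
    last_extracted: Dict[int, int],                        # track_id -> frame of last feature
    passes_quality_check: Callable[[np.ndarray], bool],    # placeholder for the filtering algorithm
    extract_feature: Callable[[np.ndarray], List[float]],  # placeholder re-ID feature extractor
) -> Optional[List[float]]:
    """Return a re-ID feature only when the 30-frame interval has elapsed and the
    crop passes the quality filter; otherwise return None."""
    last = last_extracted.get(track_id, -FEATURE_INTERVAL)
    if frame_idx - last < FEATURE_INTERVAL:
        return None    # too soon since the last representation for this tracklet
    if not passes_quality_check(crop):
        return None    # poor crops would hurt cross-camera matching
    last_extracted[track_id] = frame_idx
    return extract_feature(crop)

if __name__ == "__main__":
    history: Dict[int, int] = {}
    crop = np.zeros((256, 128, 3), dtype=np.uint8)
    for i in (0, 15, 30):
        feat = maybe_extract_feature(i, track_id=3, crop=crop, last_extracted=history,
                                     passes_quality_check=lambda c: True,
                                     extract_feature=lambda c: [0.0] * 512)
        print(i, feat is not None)   # extracted at frames 0 and 30, skipped at 15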
[Fig. 2: The algorithmic core of Ancilia. Each local node (Local Node 0 through Local Node N-1) chains a pre-processor, object detector, tracker, pose estimator, crop selection, and re-identification feature extractor, feeding high-level tasks (Task 0 through Task N-1) through de-identified data queues, with frame/object batching and unbatching stages and a communication process that transfers results to the global node. Legend: camera, neural network, algorithm, data, batched data, sequential data, frame batching, object batching, IoT flow within a node, transfer to global node, frame unbatching, de-identified data queue.]
+page_content='δN-1 ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content='δN-1 ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content='Pre- ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content='Processor ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content='Object ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content='Detector ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content='Tracker ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content='A ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content='Pose ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content='Estimator ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content='Task 0 ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content='Task 1 ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content='Task N-1 ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content='Feature ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content='Extractor ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content='Crop ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content='Selection ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content='A ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content='Transfer ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content='FL ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content='D0 ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content='D1 ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content='DN-1 ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content='∞ ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content='∞ ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content='β1 ' metadata={'source': 
'/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content='β1 ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content='β1 ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content='β1 ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content='β1 ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content='β1 ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content='β1 ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content='β1 ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content='β2 ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content='β1 ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content='β1 ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content='Local Node 0 ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content='β3 ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content='β2 ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content='β3 ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content='β1 ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content='β3 ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content='β1 ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content='β1 ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content='β1 ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content='δ0 ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content='δ1 ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content='δN-1 ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content='δ0 ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content='δ1 ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} +page_content='δN-1 ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/CtE1T4oBgHgl3EQf9wbT/content/2301.03561v1.pdf'} 
Fig. 3. A detailed view of the system design in Ancilia's local nodes. β and δ refer to different batch sizes. FL and D represent local features and data received from downstream tasks, respectively.

The filter algorithm enforces two criteria for person crops. First, a person crop must contain a high-quality view of the person. To this end, the filter algorithm uses the 2D pose skeleton and verifies that at least 9 keypoints were detected with at least 60% confidence. Second, the filter algorithm examines the overlap (i.e., Intersection over Union, or IoU) of the bounding boxes generated by the object detector; an individual's bounding box must have an IoU of no more than 0.1 with any other person. If those two conditions are met, the person crop is determined to be of high enough quality to produce an adequate feature representation. If more than one crop is deemed suitable for a single person during a 30-frame window, the one with the most confident pose is selected. The features created by the feature extractor are sent to the global node for multi-camera person re-identification. Ancilia uses OSNet [39] to extract feature representations.
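To make the filtering criteria concrete, the sketch below reproduces the two checks in Python. The keypoint and bounding-box formats, thresholds as constants, and helper names are illustrative assumptions, not Ancilia's actual implementation.

```python
import numpy as np

# Thresholds taken from the text above.
MIN_KEYPOINTS = 9     # at least 9 keypoints must be detected ...
MIN_KPT_CONF = 0.60   # ... each with at least 60% confidence
MAX_IOU = 0.10        # at most 0.1 IoU overlap with any other person

def iou(a, b):
    """IoU of two boxes given as (x1, y1, x2, y2)."""
    ix1, iy1 = max(a[0], b[0]), max(a[1], b[1])
    ix2, iy2 = min(a[2], b[2]), min(a[3], b[3])
    inter = max(0.0, ix2 - ix1) * max(0.0, iy2 - iy1)
    area = lambda box: (box[2] - box[0]) * (box[3] - box[1])
    return inter / (area(a) + area(b) - inter + 1e-9)

def crop_is_usable(keypoint_conf, box, other_boxes):
    """keypoint_conf: per-keypoint confidences of this person's 2D pose."""
    enough_keypoints = int(np.sum(np.asarray(keypoint_conf) >= MIN_KPT_CONF)) >= MIN_KEYPOINTS
    isolated = all(iou(box, other) <= MAX_IOU for other in other_boxes)
    return enough_keypoints and isolated

def select_best_crop(usable_crops):
    """usable_crops: list of (crop, keypoint_conf) collected over a 30-frame window.
    Returns the crop whose pose is most confident, or None if the list is empty."""
    if not usable_crops:
        return None
    return max(usable_crops, key=lambda c: float(np.mean(c[1])))[0]
```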
C. High-Level Tasks

High-level tasks are executed on the local node and have access to the object, tracking, and pose data generated in the previous steps. Since the decision of which high-level tasks are needed is highly application dependent, we do not consider these tasks to be part of the Ancilia algorithmic core, but instead an extension to be customized based on intended use. In this paper, we use action recognition and anomaly detection as two common examples of high-level tasks that are highly relevant to intelligent surveillance. For action recognition, we choose PoseConv3D [40] and CTR-GCN [41], two state-of-the-art networks that can utilize the 2D human pose skeletons provided by Ancilia. For anomaly detection, we use GEPC [42] and MPED-RNN [43], which are based on 2D human pose skeletons.

V. SYSTEM DESIGN

Beyond the algorithmic design, Ancilia can be analyzed from a system-level design and implementation perspective. The local node in particular has a complex system design, as seen in Fig. 3. The global node and cloud are much simpler, as shown in Fig. 2.

A. Parallelism

A key design objective of Ancilia is to achieve higher efficiency by balancing throughput and latency. Ancilia uses pipelining to take advantage of process parallelism. Each major task is implemented as a separate process, which executes concurrently with other processes.
These processes communicate with each other using queues to better utilize memory resources and enable fast inter-process communication. While pipelining is a well-known technique for optimization, the overhead associated with its implementation means a balance needs to be found. Figure 3 shows a detailed view of the system design on the local node. Each pipeline stage is separated by a queue with a size limit of λ1 elements, preventing any potential overflow from uneven execution speed between pipeline stages. By default, Ancilia uses a λ1 value of 4. As is common, Ancilia offloads highly parallel tasks that rely on neural networks (i.e., object detection, pose estimation, feature extraction, and many high-level tasks) to Graphics Processing Units (GPUs) for execution.
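The following sketch illustrates this process-and-queue pipelining pattern with Python's multiprocessing module; the stage functions are placeholders, and only the bounded-queue structure (λ1 = 4 by default) mirrors the design described above.

```python
import multiprocessing as mp

LAMBDA_1 = 4  # queue size limit between pipeline stages (Ancilia default)

def stage(work_fn, in_q, out_q):
    """Generic pipeline stage: pull a batch, process it, push the result."""
    while True:
        batch = in_q.get()
        if batch is None:          # sentinel: propagate shutdown downstream
            if out_q is not None:
                out_q.put(None)
            break
        result = work_fn(batch)
        if out_q is not None:
            out_q.put(result)      # blocks while the downstream queue is full

def build_pipeline(stage_fns):
    """Wire stages together with bounded queues and start one process each."""
    queues = [mp.Queue(maxsize=LAMBDA_1) for _ in range(len(stage_fns) + 1)]
    procs = [mp.Process(target=stage, args=(fn, queues[i], queues[i + 1]))
             for i, fn in enumerate(stage_fns)]
    for p in procs:
        p.start()
    return queues[0], queues[-1], procs   # feed input, read output, join later
```

Blocking on a full queue is what keeps a fast stage from racing more than λ1 batches ahead of a slower one.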
B. Data Batching

Batching is another technique Ancilia implements to better utilize hardware resources. Generally, batching is able to greatly increase the throughput of a system at the cost of end-to-end latency. However, many high-level tasks (e.g., action recognition, anomaly detection) require multiple video frames' worth of input data (often called a window) before they can start processing, so the latency that would be incurred by batching input frames is already inherent in these high-level tasks, as long as the frame batch and the high-level task window are of the same size. Further, as frame batching ultimately increases the throughput, the end-to-end latency is decreased compared to processing each frame sequentially. While object detection works on entire frames, all other neural networks in Ancilia work off individual objects. These objects are batched together before being input to the network, greatly increasing hardware utilization. There can be multiple object batches within a single frame batch, based on how many of the relevant objects are detected in the video.
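A minimal sketch of the two batching granularities is shown below: frames are grouped into sequential segments that match the high-level task window, and detected person crops are regrouped into fixed-size object batches. The generator-based structure is an illustrative assumption.

```python
from typing import Iterable, Iterator, List

FRAME_BATCH = 30   # β1: frames per batch, matching the high-level task window
CROP_BATCH = 32    # β2: person crops per batch fed to the pose estimator

def batch_frames(frames: Iterable, size: int = FRAME_BATCH) -> Iterator[List]:
    """Group a frame stream into sequential segments of `size` frames."""
    batch = []
    for frame in frames:
        batch.append(frame)
        if len(batch) == size:
            yield batch
            batch = []
    if batch:               # trailing partial segment (e.g., end of stream)
        yield batch

def batch_crops(crops: List, size: int = CROP_BATCH) -> Iterator[List]:
    """Regroup all person crops found in one frame batch into object batches;
    the number of object batches per frame batch depends on crowd density."""
    for start in range(0, len(crops), size):
        yield crops[start:start + size]
```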
C. Local Node

Once the local node receives the video stream from the camera, the preprocessor is responsible for all basic image processing necessary before sending the frames through the algorithmic core. After preprocessing, frames are batched in sequential segments of size β1. Ancilia sets β1 = 30 to match the window size of high-level tasks. This is also convenient as most modern security and IoT cameras record video at either 30 or 60 FPS. The batched frames are sent to the object detector, which outputs a list of objects with class labels and bounding box coordinates. Bounding boxes for pedestrians are sent to the tracker, while bounding boxes for other objects are passed through the system for use in high-level tasks and statistical analysis. A crop of each pedestrian from the original frame is passed through to the pose estimator. At the tracker, bounding boxes for pedestrians are unbatched to fit the tracker's sequential operation. The tracker groups the pedestrians and either matches them with previously seen pedestrians or assigns them a unique local ID. Afterwards, the pedestrians are once again batched by frame and sent to the pose estimator. At the pose estimator, object batching is performed on the person crops, with a batch size of β2 = 32. These batches are fed to the pose estimator, which outputs human pose skeletons for each person crop.

Then the pedestrian bounding boxes, person crops, local IDs, and human pose skeletons are once again batched by frame and combined with the object bounding boxes from the object detector. The pedestrian bounding boxes, person crops, local IDs, and pose skeletons are sent to crop selection, while the pedestrian bounding boxes, object bounding boxes, local IDs, and pose skeletons are sent to each high-level task as necessary. While the person crops are necessary for the feature extraction that enables multi-camera re-identification, no identifiable data is sent to any of the high-level tasks, keeping in line with the ethical concerns mentioned in Sec. III. Crop selection filters out low-quality person crops based on bounding box overlap and keypoint confidence, as described in Sec. IV. The remaining crops are then batched, with size β3 being dynamic based on the number of persons in the scene, and sent to the feature extractor. Once features are extracted, they are sent for transfer to the server. Each high-level task receives data at the granularity of a frame batch and sends data to the server at whatever granularity that task requires. Each high-level task has its own process and works in parallel with other tasks as well as with crop selection and feature extraction.
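This routing step can be pictured as assembling two different payloads from the same per-frame-batch results: crop selection receives the person crops it needs for feature extraction, whereas high-level tasks receive only de-identified data. The dictionary layout and key names below are illustrative assumptions.

```python
def route_frame_batch(results):
    """results: per-frame-batch outputs with keys such as 'person_boxes',
    'person_crops', 'local_ids', 'poses', and 'object_boxes' (illustrative names)."""
    # Crop selection / feature extraction needs the image crops themselves.
    crop_selection_payload = {
        "person_boxes": results["person_boxes"],
        "person_crops": results["person_crops"],
        "local_ids": results["local_ids"],
        "poses": results["poses"],
    }
    # High-level tasks receive no person crops, i.e., no identifiable imagery.
    high_level_payload = {
        "person_boxes": results["person_boxes"],
        "object_boxes": results["object_boxes"],
        "local_ids": results["local_ids"],
        "poses": results["poses"],
    }
    return crop_selection_payload, high_level_payload
```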
Communication is completely decoupled from the pipeline, so once the data is sent the local node pipeline continues to function as normal. Importantly, no identifiable information is ever sent to the global node, keeping in line with the privacy and ethical concerns mentioned in Sec. III.

D. Global Node

All received data is stored in a relational database on the global node. The matching algorithm described in Sec. IV compares the received features with existing features in the database over the period λ5 and assigns a global ID based on the results. The default value for λ5 is set to 1 hour, but this should be changed to suit the needs of the application. An assortment of algorithms performs statistical analysis using the relational database, as detailed in Sec. IV. The analysis is transmitted to the cloud node using APIs provided by the cloud service provider. By default, Ancilia uses Amazon Web Services (AWS), but this can be altered based on user/application needs. The cloud (e.g., AWS) receives analyzed data from the global node.
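The global ID assignment can be sketched as a nearest-neighbor search over features stored within the matching window λ5. The in-memory gallery, cosine-similarity criterion, and threshold below are illustrative assumptions; the text only specifies that received features are compared against the database over the period λ5.

```python
import time
import numpy as np

MATCH_WINDOW_S = 3600   # λ5: default matching period of 1 hour
SIM_THRESHOLD = 0.7     # assumed similarity cutoff for declaring a re-ID match

class GlobalGallery:
    """In-memory stand-in for the global node's relational feature store."""

    def __init__(self):
        self.entries = []    # list of (timestamp, global_id, normalized feature)
        self.next_id = 0

    def assign_global_id(self, feature, now=None):
        now = time.time() if now is None else now
        feature = np.asarray(feature, dtype=float)
        feature = feature / (np.linalg.norm(feature) + 1e-9)
        # Only consider features received within the last λ5 seconds.
        recent = [(gid, f) for ts, gid, f in self.entries
                  if now - ts <= MATCH_WINDOW_S]
        best_gid, best_sim = None, -1.0
        for gid, f in recent:
            sim = float(np.dot(feature, f))   # cosine similarity of unit vectors
            if sim > best_sim:
                best_gid, best_sim = gid, sim
        if best_gid is None or best_sim < SIM_THRESHOLD:
            best_gid = self.next_id           # unseen person: issue a new global ID
            self.next_id += 1
        self.entries.append((now, best_gid, feature))
        return best_gid
```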
VI. EXPERIMENTAL RESULTS

A. Algorithmic Core

The algorithmic core of Ancilia consists of multiple algorithms, each of which works off of data generated by the previous algorithms. As these algorithms leverage imperfect neural networks, they generate noise that accumulates through the system. To understand the source of this noise, we must first look at the accuracy of each of these core algorithms in isolation. Table I shows the accuracies of the algorithmic core's four main tasks: object detection, pedestrian tracking, human pose estimation, and person re-identification. The table also shows the accuracies of the top SotA models in each task. These SotA methods are not suitable for intelligent surveillance applications, as their excessive computation and vast parameters make real-time execution impossible, but the comparison allows us to see the maximum potential allowable by current research and the accuracy loss incurred to keep Ancilia performing in real-time.

TABLE I
ACCURACY OF ANCILIA'S ALGORITHMIC CORE NETWORKS IN ISOLATION. ACCURACIES OF OBJECT DETECTION AND POSE ESTIMATION ARE USING THE COCO DATASET [44], TRACKING USING MOT20 [45], AND PERSON REID USING DUKEMTMC [46]. SOTA REPRESENTS THE HIGHEST ACCURACIES CURRENTLY ACHIEVABLE WITH STATE-OF-THE-ART METHODS WHEN COMPUTATION AND LATENCY ARE NOT A CONCERN.

Task              Method          Metric  Accuracy  SotA
Object Detection  YOLOv5 [36]     mAP     49.0      65.0 [47]
Tracking          ByteTrack [37]  MOTA    77.8      77.9 [48]
Pose Estimation   HRNet [38]      AP      75.1      81.1 [49]
Person ReID       OSNet [39]      Top-1   88.6      95.6 [50]
Object detection sees the biggest hit to accuracy, with a 16% drop from SotA. This is intuitive, as YOLOv5 [36] is not only the largest model in the algorithmic core, but also the only one that operates on the raw camera stream. So while larger models are available and would be able to produce higher accuracy, even a slight increase in model size or computation would result in a noticeable decrease in throughput. Human pose estimation sees a decrease in accuracy for a similar reason, though much smaller in scale at only 6%. While HRNet [38] is not run on the raw camera stream, it is run individually for each person detected by the object detector. As such, maintaining a small model size is preferable. Person re-identification sees a slightly larger drop in accuracy than human pose estimation, at 7%. While this is partly due to using a lightweight model, OSNet [39], the SotA model for person reID is also lightweight. However, the SotA uses a centroid-based retrieval method not suitable for open-set reID, which describes most surveillance scenarios. Pedestrian tracking sees almost no drop in accuracy, approximately 0.1%. This stems from the comparative ease of tracking pedestrians in a single camera, where a simple, lightweight algorithm like ByteTrack [37] sees almost no performance difference from top-of-the-line SotA approaches.
B. High-Level Tasks

To better understand how the noise generated by the algorithmic core affects overall performance, and thus how well Ancilia performs in the realm of real-world intelligent surveillance, we examine the performance of two high-level cognitive surveillance tasks when running on Ancilia. For Ancilia to be a benefit to intelligent surveillance tasks, we must ensure that excess false alarms or missed positive events do not occur. To assess this, we choose action recognition and anomaly detection, as these tasks can utilize the human pose information generated by the algorithmic core, resulting in faster and less biased inference. Since both these methods utilize temporal batches of human poses for each individual, these experiments will directly reflect the quality of the object detection, tracking, re-identification, and pose estimation data generated by Ancilia.

1) High-level Task - Action Recognition: We select two state-of-the-art action recognition models, PoseConv3D [40] and CTR-GCN [41], and train them using data generated with Ancilia. For each model, we train and test with full (30 FPS) and half (15 FPS) throughput on NTU60-XSub [51]. Both models use a window size of 30 and are trained for 24 epochs using Stochastic Gradient Descent (SGD) with a momentum of 0.9 and Cosine Annealing scheduling. PoseConv3D and CTR-GCN have weight decays of 3e−4 and 5e−4 and initial learning rates of 0.4 and 0.2, respectively. The results of these experiments can be seen in Tab. II. We report the Top-1 and Top-5 accuracy and compare the results using data generated by Ancilia to the original data available through the PYSKL toolbox [52].
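As a hedged illustration of this training recipe, the PyTorch snippet below configures SGD with momentum 0.9, cosine-annealing scheduling, and the per-model weight decay and learning rate reported above; the model construction itself is omitted and `model` is a placeholder.

```python
import torch

# Hyperparameters reported in the text; one pair per action recognition model.
CONFIGS = {
    "poseconv3d": {"lr": 0.4, "weight_decay": 3e-4},
    "ctr_gcn":    {"lr": 0.2, "weight_decay": 5e-4},
}
EPOCHS = 24

def build_optimizer(model: torch.nn.Module, name: str):
    cfg = CONFIGS[name]
    optimizer = torch.optim.SGD(
        model.parameters(),
        lr=cfg["lr"],
        momentum=0.9,
        weight_decay=cfg["weight_decay"],
    )
    # Cosine annealing over the full 24-epoch schedule.
    scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(optimizer, T_max=EPOCHS)
    return optimizer, scheduler
```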
TABLE II
TOP-1 AND TOP-5 ACCURACIES ON NTU60-XSUB [51] IN FULL AND HALF THROUGHPUT MODES FOR POSECONV3D [40] AND CTR-GCN [41].

Model            Data  FPS  Top-1 (%)  Top-5 (%)
PoseConv3D [40]  [52]  15   91.96      99.47
                 [52]  30   92.76      99.57
                 Ours  15   88.79      98.82
                 Ours  30   91.99      99.28
CTR-GCN [41]     [52]  15   86.36      98.46
                 [52]  30   83.07      98.26
                 Ours  15   81.58      97.52
                 Ours  30   80.44      97.2

We can see that Ancilia is able to provide data of comparable quality to the original; action recognition as a high-level task in Ancilia sees around a 1% drop in accuracy compared to the original data using PoseConv3D [40] at full throughput, and around 3% at half throughput. Using CTR-GCN [41], Ancilia sees a 2.5% drop at full throughput and a 4.8% drop at half throughput, compared to the original data.
From this we can infer that PoseConv3D is more robust to noise than CTR-GCN; however, both performed reasonably well with data generated from Ancilia, demonstrating its efficacy for intelligent surveillance applications. Another interesting observation is that CTR-GCN [41] actually performed noticeably better at half throughput than at full throughput. This means that CTR-GCN is better suited to taking advantage of the longer temporal window allowed when using half throughput. This is something to consider when choosing an action recognition model if a real-time throughput of 30 FPS cannot be guaranteed.

2) High-level Task - Anomaly Detection: Using the ShanghaiTech dataset [53], we train two state-of-the-art anomaly detection models, GEPC [42] and MPED-RNN [43], using both data generated by Ancilia and the data provided by the original authors. The same training strategy from Sec. VI-B1 is used, with both models trained in full (20 FPS) and half (10 FPS) modes. GEPC is trained for 25 epochs with a window size of 30 and a stride of 20, using the Adam optimizer with a learning rate of 1e−4, weight decay of 1e−5, and batch size of 512. MPED-RNN is trained with an input window size of 30, a reconstruction window of 12, and a prediction window of 6. The model is trained for 5 epochs using the Adam optimizer with a learning rate of 1e−3 and a batch size of 265.
The results of this experiment can be seen in Tab. III.

TABLE III
AUC ROC, AUC PR, AND EER ON SHANGHAITECH DATASET [53] IN FULL AND HALF THROUGHPUT MODES FOR GEPC [42] AND MPED-RNN [43].

Model          Data  FPS  AUC ROC  AUC PR  EER
GEPC [42]      [42]  10   0.6906   0.5951  0.35
               [42]  20   0.7372   0.6427  0.31
               Ours  10   0.6888   0.5905  0.35
               Ours  20   0.7223   0.6023  0.32
MPED-RNN [43]  [43]  10   0.6645   0.5733  0.37
               [43]  20   0.7023   0.5869  0.36
               Ours  10   0.6685   0.5661  0.37
               Ours  20   0.6679   0.5487  0.37
Fig. 4. Throughput of Ancilia across different crowd densities (Normal, Heavy, and Extreme) versus the number of local nodes for (a) Server A, (b) Server B, and (c) Workstation. Hardware details can be seen in Tab. IV.

TABLE IV
SYSTEM CONFIGURATIONS. STATS ARE PER CPU/GPU OF THE LISTED TYPE.

Name         CPU Model                Cores  Clock Speed  GPU Model   CUDA Cores  VRAM
Server A     2× EPYC 7513             32     2.6 GHz      4× V100     5120        32 GB
Server B     2× Xeon E5-2640 v4       10     2.4 GHz      2× Titan V  5120        12 GB
Workstation  Threadripper Pro 3975WX  32     3.50 GHz     3× A6000    10752       48 GB

In line with current practices, we report the Area Under the Receiver Operating Characteristic Curve (AUC ROC), the Area Under the Precision-Recall Curve (AUC PR), and the Equal Error Rate (EER). With GEPC, we can see that Ancilia more than measures up to the task, with only a 1.5% drop in AUC ROC at full throughput and less than a 0.2% drop in AUC ROC at half throughput. AUC PR shows a more substantial drop of 4% at full throughput, but goes down to less than 0.5% at half throughput.
Equal Error Rates are almost identical, seeing almost no change (less than 0.01) when using Ancilia. MPED-RNN, which displayed lower overall accuracy in all regards to begin with, sees a more significant drop in AUC ROC at full throughput, losing 3.5%. However, at half throughput the AUC ROC actually increases when using Ancilia, though only by 0.5%. The AUC PR results mirror those of GEPC, dropping 3.8% at full throughput and 0.7% at half throughput. The Equal Error Rates are once again nearly identical. Being able to perform a high-level task such as anomaly detection while maintaining accuracy so close to the current state of the art demonstrates Ancilia's ability to produce quality data suitable for intelligent surveillance applications.
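For concreteness, the short Python sketch below shows how these three metrics are commonly computed from per-frame anomaly scores and ground-truth labels using scikit-learn; the array names and file paths are illustrative placeholders, not the evaluation code used here.

import numpy as np
from sklearn.metrics import roc_auc_score, average_precision_score, roc_curve

# Per-frame anomaly scores and binary labels (1 = anomalous frame, 0 = normal frame).
# The .npy file names are hypothetical placeholders.
scores = np.load("anomaly_scores.npy")
labels = np.load("frame_labels.npy")

auc_roc = roc_auc_score(labels, scores)            # area under the ROC curve
auc_pr = average_precision_score(labels, scores)   # average precision, a standard estimate of AUC PR

# EER: operating point where the false positive rate equals the false negative rate.
fpr, tpr, _ = roc_curve(labels, scores)
idx = np.nanargmin(np.abs(fpr - (1.0 - tpr)))
eer = (fpr[idx] + (1.0 - tpr[idx])) / 2.0

print(f"AUC ROC {auc_roc:.4f}, AUC PR {auc_pr:.4f}, EER {eer:.2f}")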
C. Real-time System Performance

Algorithmic accuracy is vital for ensuring that the information provided by high-level cognitive tasks is beneficial for surveillance applications. However, Ancilia's ability to perform in real-time is equally important. We conduct a series of experiments evaluating the runtime performance of Ancilia on different hardware, with different scenario intensities, and for an increasing number of local nodes per hardware device. We focus on the performance of the local node, as the global node is completely decoupled from the algorithmic pipeline and has no noticeable effect on throughput or latency.

We choose three different hardware configurations for these experiments: a high-end server, a lower-end server, and a high-end workstation, as seen in Tab. IV. For our scenarios, we use the DukeMTMC-video dataset [46] and pick three scenes with different crowd densities: normal density, heavy density, and extreme density. The distribution of detection density in each scenario can be seen in Fig. 5. Note that what is considered "normal density" will change based on the application environment, which is why we report on such a wide range. Each video lasts 32k frames, with 7k frames of warm-up and cool-down. We test using 1, 2, 4, 6, and 8 local nodes on a single system, showing how throughput and latency scale in such cases. Each experiment is conducted three times, with the throughput and latency averaged across runs. The results of these experiments can be seen in Tab. V and Fig. 4. The distribution of throughput in these scenes can be seen in Fig. 5.
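To make the throughput and latency measurements described above concrete, the rough sketch below illustrates one way such numbers could be collected for a single run of a pipelined local node. The node.submit and node.results calls are hypothetical stand-ins for a local node's interface, not Ancilia's actual API, and the warm-up handling mirrors the 7k-frame warm-up used in these experiments.

import time

def measure_run(node, frames, warmup=7000):
    # Timestamp each frame on ingest; latency is recorded when that frame's result is
    # emitted, so frames still in flight inside the pipeline are accounted for correctly.
    ingest = {}
    latencies = []
    start = time.monotonic()
    for i, frame in enumerate(frames):
        ingest[i] = time.monotonic()
        node.submit(i, frame)                  # hypothetical: hand the frame to the asynchronous pipeline
    for frame_id, _result in node.results():   # hypothetical: yields (frame id, result) as frames complete
        if frame_id >= warmup:                 # skip warm-up (and, symmetrically, cool-down) frames
            latencies.append(time.monotonic() - ingest[frame_id])
    elapsed = time.monotonic() - start
    return len(latencies) / elapsed, sum(latencies) / len(latencies)   # FPS, mean end-to-end latency (s)

Averaging such per-run measurements over the three runs yields numbers comparable to those reported in Tab. V.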
Fig. 4. Throughput of Ancilia across different crowd densities. Hardware details can be seen in Tab. IV. (Panels (a) Server A, (b) Server B, and (c) Workstation plot throughput in FPS against the number of local nodes, 1 through 8, for the Normal, Heavy, and Extreme scenarios.)

TABLE IV
SYSTEM CONFIGURATIONS. STATS ARE PER CPU/GPU OF THE LISTED TYPE.

Name          Processor Model            Cores   Clock Speed   GPU Model     CUDA Cores   VRAM
Server A      2× EPYC 7513               32      2.6 GHz       4× V100       5120         32 GB
Server B      2× Xeon E5-2640 v4         10      2.4 GHz       2× Titan V    5120         12 GB
Workstation   Threadripper Pro 3975WX    32      3.50 GHz      3× A6000      10752        48 GB

TABLE V
AVERAGE THROUGHPUT AND LATENCY WHEN RUNNING MULTIPLE LOCAL NODES ON A SINGLE SERVER.

Crowd Density                         Nodes   Server A FPS   Latency (s)   Server B FPS   Latency (s)   Workstation FPS   Latency (s)
Normal (70 detections per second)     1       52.94          5.39          29.76          9.73          54.91             4.90
                                      2       48.99          5.97          22.07          12.7          45.06             5.97
                                      4       38.91          7.29          -              -             31.67             8.53
                                      6       31.35          12.55         -              -             23.10             13.61
                                      8       26.51          17.94         -              -             17.98             24.95
Heavy (216 detections per second)     1       40.16          15.66         26.48          11.93         48.52             5.95
                                      2       41.22          10.87         19.55          14.51         41.45             7.10
                                      4       34.54          14.48         -              -             30.01             11.20
                                      6       27.05          22.42         -              -             20.99             30.65
                                      8       20.02          34.68         -              -             15.77             46.28
Extreme (744 detections per second)   1       17.80          36.04         14.50          43.81         25.68             24.78
                                      2       20.90          30.73         13.40          47.57         23.52             26.97
                                      4       17.27          38.15         -              -             17.56             39.43
                                      6       9.56           76.71         -              -             8.94              94.49
                                      8       6.49           130.08        -              -             6.31              134.82

Under normal crowd density, Server A and Workstation are both able to achieve over 50 FPS with a single local node, with end-to-end latencies of 5.39 and 4.90 seconds, respectively. This is well above the 30 and 20 FPS required by action recognition and anomaly detection algorithms at full throughput, and the latency is low enough to be suitable for most surveillance applications, where the main concern is notifying authorities in time for an appropriate response. Server A is able to handle 6 local nodes in the normal scenario while maintaining above 30 FPS, while Workstation can do so with 4 nodes. Server A is able to maintain above 26 FPS while running all 8 local nodes, while Workstation drops to just below 18 FPS at 8 local nodes. Server B falls just short of 30 FPS even with a single node in normal crowd density. However, it is able to maintain above 20 FPS while handling two nodes simultaneously. Due to having only a single GPU and limited VRAM, Server B was unable to run 4 or more nodes concurrently.

Heavy crowd density proves more challenging, with both Server A and Workstation only able to achieve above 30 FPS with up to 4 nodes. The end-to-end latency is also longer than it was under normal crowd density, with Server A seeing almost double the latency and Workstation seeing around a 20% to 80% increase in most cases. Server A and Workstation are able to maintain above 20 FPS at 8 and 6 nodes, respectively, while Workstation drops to just above 15 FPS at 8 nodes.
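This sort of capacity reasoning can be written down directly. The short helper below, a sketch rather than part of Ancilia, replays it for the Server A column of Tab. V under heavy crowd density against the 30 FPS and 20 FPS targets mentioned above.

# Measured throughput (FPS) by node count for Server A under heavy crowd density (Tab. V).
server_a_heavy = {1: 40.16, 2: 41.22, 4: 34.54, 6: 27.05, 8: 20.02}

def max_nodes(fps_by_nodes, target_fps):
    # Largest tested node count whose measured throughput still meets the per-camera target.
    feasible = [n for n, fps in fps_by_nodes.items() if fps >= target_fps]
    return max(feasible) if feasible else 0

print(max_nodes(server_a_heavy, 30))   # 4 nodes at full-throughput action recognition (30 FPS)
print(max_nodes(server_a_heavy, 20))   # 8 nodes at full-throughput anomaly detection (20 FPS)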
Interestingly, with heavy crowd density we start to see unusual behavior, with Server A performing worse with a single node than it does with 2 nodes. This is caused by the abundance of CPU and GPU resources available to the server and a single node being unable to fully utilize them. As such, the behavior of Server A in the heavy and extreme crowd density scenarios does not start to match the expected behavior and mimic the other systems until multiple nodes are run simultaneously. This behavior is not too concerning, considering it does not make sense to purchase such a high-end server-class machine to run only a single local node, when a more latency-focused workstation would be both cheaper and more effective. Server B behaves similarly to how it did with normal crowd density, except that it falls slightly below 20 FPS when running two nodes. Assuming only half throughput is needed for high-level tasks, Server B would still be suitable for running up to two nodes.

Fig. 5. The performance of Ancilia in terms of throughput and detection distribution. (Two panels show probability distributions over the average number of detections per batch and over throughput in FPS for the Normal, Heavy, and Extreme scenarios.)

With the extreme crowd density scenario, Ancilia begins to struggle. None of the systems are able to achieve above 30 FPS even with a single camera, putting full-throughput action recognition out of reach. Server A is able to achieve above 20 FPS with 2 nodes (but notably not with 1), and Workstation is able to do so with 1 or 2 nodes. Both Server A and Workstation can maintain above 15 FPS at 4 nodes, but both drop to around 9 and 6 FPS at 6 and 8 nodes, respectively. [54] argues that 5 FPS is suitable for tracking pedestrians, and while that is true, high-level tasks that rely on detailed human motion, such as action recognition and anomaly detection, often struggle for accuracy when running below 10 FPS. Another issue is the increased latency. Running only 1 node, Server A and Workstation have latencies of 36 seconds and 25 seconds respectively, which is suitable for many surveillance applications but might be too much for those that require sharper response times. The latency increases to over 2 minutes for both systems with 8 nodes. Combined with the low throughput, it becomes difficult to recommend running more than 4 nodes on a single system with Ancilia when operating under extreme crowd density, except for applications where low throughput and high latency are not as much of a concern. Server B is unable to achieve 15 FPS, but does stay above 10 FPS for both 1 and 2 nodes, making it suitable for half throughput in anomaly detection. However, the latencies of 44 and 48 seconds might be too much for some applications.
This is the extreme scenario, and it understandably provides quite the challenge for real-time execution. Overall, Ancilia is able to meet the needs of high-level cognitive tasks while still achieving performance suitable for real-time intelligent surveillance applications. Exact performance is dependent on both the hardware used and the intensity of the scene, but these results show that even for the most extreme of scenarios, Ancilia can be used to provide intelligent assistance to surveillance applications.

VII. CONCLUSION

In this article we presented Ancilia, an end-to-end scalable intelligent video surveillance system for the Artificial Intelligence of Things. Through empirical evaluation, Ancilia has demonstrated its ability to bring state-of-the-art artificial intelligence to real-world surveillance applications. Ancilia performs high-level cognitive tasks (i.e., action recognition and anomaly detection) in real-time, all while respecting the ethical and privacy concerns common to surveillance applications.

ACKNOWLEDGMENTS

This research is supported by the National Science Foundation (NSF) under Award No. 1831795 and NSF Graduate Research Fellowship Award No. 1848727.

REFERENCES

[1] C. Alexandre, "The public safety implications of the Itaewon tragedy," Dec 2022. [Online]. Available: https://thediplomat.com/2022/12/the-public-safety-implications-of-the-itaewon-tragedy/
[2] N. Salahieh, J. Miller, and H. Yan, "As north carolinians regain power, investigators probe terrorism and threats against power substations across the us. one expert explains what needs to be done," Dec 2022. [Online]. Available: https://www.cnn.com/2022/12/08/us/power-outage-moore-county-investigation-thursday/index.html
[3] S. Feldstein and C. E. for International Peace, The Global Expansion of AI Surveillance. Carnegie Endowment for International Peace, 2019. [Online]. Available: https://books.google.com/books?id=W9JQzQEACAAJ
[4] M. R. Patrikar and D. R. Parate, "Anomaly detection using edge computing in video surveillance system: review," International Journal of Multimedia Information Retrieval, pp. 85–110, 2022. [Online]. Available: https://doi.org/10.1007/s13735-022-00227-8
[5] A. Danesh Pazho, G. Alinezhad Noghre, A. A. Purkayastha, J. Vempati, O. Martin, and H. Tabkhi, "A comprehensive survey of graph-based deep learning approaches for anomaly detection in complex distributed systems," arXiv preprint arXiv:2206.04149, 2022.
[6] X. Li and Z.-m. Cai, "Anomaly detection techniques in surveillance videos," in 2016 9th International Congress on Image and Signal Processing, BioMedical Engineering and Informatics (CISP-BMEI), 2016, pp. 54–59.
[7] T. Li, H. Chang, M. Wang, B. Ni, R. Hong, and S. Yan, "Crowded scene analysis: A survey," IEEE Trans. Cir. and Sys. for Video Technol., vol. 25, no. 3, pp. 367–386, Mar. 2015. [Online]. Available: https://doi.org/10.1109/TCSVT.2014.2358029
[8] B. S. Shobha and R. Deepu, "A review on video based vehicle detection, recognition and tracking," in 2018 3rd International Conference on Computational Systems and Information Technology for Sustainable Solutions (CSITSS), 2018, pp. 183–186.
[9] J. Zhang, C. Xu, Z. Gao, J. J. P. C. Rodrigues, and V. H. C. de Albuquerque, "Industrial pervasive edge computing-based intelligence iot for surveillance saliency detection," IEEE Transactions on Industrial Informatics, vol. 17, no. 7, pp. 5012–5020, 2021.
[10] G. T. Draughon, P. Sun, and J. P. Lynch, "Implementation of a computer vision framework for tracking and visualizing face mask usage in urban environments," in 2020 IEEE International Smart Cities Conference (ISC2), 2020, pp. 1–8.
[11] R. Xu, S. Y. Nikouei, Y. Chen, A. Polunchenko, S. Song, C. Deng, and T. R. Faughnan, "Real-time human objects tracking for smart surveillance at the edge," in 2018 IEEE International Conference on Communications (ICC), 2018, pp. 1–6.
[12] S. Y. Nikouei, Y. Chen, S. Song, R. Xu, B.-Y. Choi, and T. Faughnan, "Smart surveillance as an edge network service: From harr-cascade, svm to a lightweight cnn," in 2018 IEEE 4th International Conference on Collaboration and Internet Computing (CIC), 2018, pp. 256–265.
[13] S. Y. Nikouei, Y. Chen, S. Song, B.-Y. Choi, and T. R. Faughnan, "Toward intelligent surveillance as an edge network service (isense) using lightweight detection and tracking algorithms," IEEE Transactions on Services Computing, vol. 14, no. 6, pp. 1624–1637, 2021.
[14] C. Neff, M. Mendieta, S. Mohan, M. Baharani, S. Rogers, and H. Tabkhi, "Revamp2t: Real-time edge video analytics for multicamera privacy-aware pedestrian tracking," IEEE Internet of Things Journal, vol. 7, no. 4, pp. 2591–2602, 2020.
[15] B. Gaikwad and A. Karmakar, "Smart surveillance system for real-time multi-person multi-camera tracking at the edge," in Journal of Real-Time Image Processing, vol. 18, 2021.
[16] Y. Zhao, Y. Yin, and G. Gui, "Lightweight deep learning based intelligent edge surveillance techniques," IEEE Transactions on Cognitive Communications and Networking, vol. 6, no. 4, pp. 1146–1154, 2020.
[17] R. Ke, Y. Zhuang, Z. Pu, and Y. Wang, "A smart, efficient, and reliable parking surveillance system with edge artificial intelligence on iot devices," IEEE Transactions on Intelligent Transportation Systems, vol. 22, no. 8, pp. 4962–4974, 2021.
[18] R. E. Kalman, "A new approach to linear filtering and prediction problems," Transactions of the ASME–Journal of Basic Engineering, vol. 82, no. Series D, pp. 35–45, 1960.
[19] J. Liang, L. Jiang, J. C. Niebles, A. G. Hauptmann, and L. Fei-Fei, "Peeking into the future: Predicting future person activities and locations in videos," in Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition (CVPR), June 2019.
[20] V. Nguyen, D. Phung, D.-S. Pham, and S. Venkatesh, "Bayesian nonparametric approaches to abnormality detection in video surveillance," Annals of Data Science, vol. 2, pp. 21–41, 2015. [Online]. Available: https://doi.org/10.1007/s40745-015-0030-3
[21] R. Nawaratne, D. Alahakoon, D. De Silva, and X. Yu, "Spatiotemporal anomaly detection using deep learning for real-time video surveillance," IEEE Transactions on Industrial Informatics, vol. 16, no. 1, pp. 393–402, 2020.
[22] R. Arroyo, J. J. Yebes, L. M. Bergasa, I. G. Daza, and J. Almazán, "Expert video-surveillance system for real-time detection of suspicious behaviors in shopping malls," Expert Systems with Applications, vol. 42, no. 21, pp. 7991–8005, 2015. [Online]. Available: https://www.sciencedirect.com/science/article/pii/S0957417415004182
[23] J. T. Zhou, J. Du, H. Zhu, X. Peng, Y. Liu, and R. S. M. Goh, "Anomalynet: An anomaly detection network for video surveillance," IEEE Transactions on Information Forensics and Security, vol. 14, no. 10, pp. 2537–2550, 2019.
[24] J. Pierce, R. Y. Wong, and N. Merrill, "Sensor illumination: Exploring design qualities and ethical implications of smart cameras and image/video analytics," in Proceedings of the 2020 CHI Conference on Human Factors in Computing Systems, ser. CHI '20. New York, NY, USA: Association for Computing Machinery, 2020, p.
[24] J. Pierce, R. Y. Wong, and N. Merrill, “Sensor illumination: Exploring design qualities and ethical implications of smart cameras and image/video analytics,” in Proceedings of the 2020 CHI Conference on Human Factors in Computing Systems, ser. CHI ’20. New York, NY, USA: Association for Computing Machinery, 2020, pp. 1–19. [Online]. Available: https://doi.org/10.1145/3313831.3376347
[25] H. Nissenbaum, “Privacy as contextual integrity,” Wash. L. Rev., vol. 79, p. 119, 2004.
[26] Y. E. Appenzeller, P. S. Appelbaum, and M. Trachsel, “Ethical and practical issues in video surveillance of psychiatric units,” Psychiatric Services, vol. 71, no. 5, pp. 480–486, 2020.
[27] F. Tariq, N. Kanwal, M. S. Ansari, A. Afzaal, M. N. Asghar, and M. J. Anjum, “Towards a privacy preserving surveillance approach for smart cities,” in 3rd Smart Cities Symposium (SCS 2020), vol. 2020, 2020, pp. 450–455.
[28] W. Hartzog, Privacy’s Blueprint: The Battle to Control the Design of New Technologies. Harvard University Press, 2018.
[29] J. Daubert, A. Wiesmaier, and P. Kikiras, “A view on privacy & trust in IoT,” in 2015 IEEE International Conference on Communication Workshop (ICCW). IEEE, 2015, pp. 2665–2670.
[30] T. Speicher, M. Ali, G. Venkatadri, F. N. Ribeiro, G. Arvanitakis, F. Benevenuto, K. P. Gummadi, P. Loiseau, and A. Mislove, “Potential for discrimination in online targeted advertising,” in Conference on Fairness, Accountability and Transparency. PMLR, 2018, pp. 5–19.
[31] I. D. Raji, T. Gebru, M. Mitchell, J. Buolamwini, J. Lee, and E. Denton, “Saving face: Investigating the ethical concerns of facial recognition auditing,” in Proceedings of the AAAI/ACM Conference on AI, Ethics, and Society, 2020, pp. 145–151.
[32] N. Martinez-Martin, “What are important ethical implications of using facial recognition technology in health care?” AMA Journal of Ethics, vol. 21, no. 2, p. E180, 2019.
[33] L. Introna and H. Nissenbaum, “Facial recognition technology: A survey of policy and implementation issues,” 2010.
[34] E. Selinger and B. Leong, “The ethics of facial recognition technology,” forthcoming in The Oxford Handbook of Digital Ethics, ed. Carissa Véliz, 2021.
[35] D. Leslie, “Understanding bias in facial recognition technologies,” Tech. Rep., 2020. [Online]. Available: https://zenodo.org/record/4050457
[36] G. Jocher, A. Chaurasia, A. Stoken, J. Borovec, NanoCode012, Y. Kwon, K. Michael, TaoXie, J. Fang, imyhxy, Lorna, Z. Yifu, C. Wong, A. V, D. Montes, Z. Wang, C. Fati, J. Nadar, Laughing, UnglvKitDe, V. Sonck, tkianai, yxNONG, P. Skalski, A. Hogan, D. Nair, M. Strobel, and M. Jain, “ultralytics/yolov5: v7.0 - YOLOv5 SOTA Realtime Instance Segmentation,” Nov. 2022. [Online]. Available: https://doi.org/10.5281/zenodo.7347926
[37] Y. Zhang, P. Sun, Y. Jiang, D. Yu, F. Weng, Z. Yuan, P. Luo, W. Liu, and X. Wang, “ByteTrack: Multi-object tracking by associating every detection box,” 2022.
[38] K. Sun, B. Xiao, D. Liu, and J. Wang, “Deep high-resolution representation learning for human pose estimation,” in CVPR, 2019.
[39] K. Zhou, Y. Yang, A. Cavallaro, and T. Xiang, “Omni-scale feature learning for person re-identification,” in ICCV, 2019.
[40] H. Duan, Y. Zhao, K. Chen, D. Lin, and B. Dai, “Revisiting skeleton-based action recognition,” in Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition, 2022, pp. 2969–2978.
[41] Y. Chen, Z. Zhang, C. Yuan, B. Li, Y. Deng, and W. Hu, “Channel-wise topology refinement graph convolution for skeleton-based action recognition,” in Proceedings of the IEEE/CVF International Conference on Computer Vision, 2021, pp. 13359–13368.
[42] A. Markovitz, G. Sharir, I. Friedman, L. Zelnik-Manor, and S. Avidan, “Graph embedded pose clustering for anomaly detection,” in Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition, 2020, pp. 10539–10547.
[43] R. Morais, V. Le, T. Tran, B. Saha, M. Mansour, and S. Venkatesh, “Learning regularity in skeleton trajectories for anomaly detection in videos,” in Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition, 2019, pp. 11996–12004.
[44] T.-Y. Lin, M. Maire, S. Belongie, L. Bourdev, R. Girshick, J. Hays, P. Perona, D. Ramanan, C. L. Zitnick, and P. Dollár, “Microsoft COCO: Common objects in context,” 2014.
[45] P. Dendorfer, H. Rezatofighi, A. Milan, J. Shi, D. Cremers, I. Reid, S. Roth, K. Schindler, and L. Leal-Taixé, “MOT20: A benchmark for multi object tracking in crowded scenes,” 2020. [Online]. Available: https://arxiv.org/abs/2003.09003
[46] E. Ristani, F. Solera, R. Zou, R. Cucchiara, and C. Tomasi, “Performance measures and a data set for multi-target, multi-camera tracking,” in European Conference on Computer Vision Workshop on Benchmarking Multi-Target Tracking, 2016.
[47] W. Wang, J. Dai, Z. Chen, Z. Huang, Z. Li, X. Zhu, X. Hu, T. Lu, L. Lu, H. Li et al., “InternImage: Exploring large-scale vision foundation models with deformable convolutions,” arXiv preprint arXiv:2211.05778, 2022.
[48] L. Zheng, M. Tang, Y. Chen, G. Zhu, J. Wang, and H. Lu, “Improving multiple object tracking with single object tracking,” in Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition (CVPR), June 2021, pp. 2453–2462.
[49] Y. Xu, J. Zhang, Q. Zhang, and D. Tao, “ViTPose: Simple vision transformer baselines for human pose estimation,” in Advances in Neural Information Processing Systems, A. H. Oh, A. Agarwal, D. Belgrave, and K. Cho, Eds., 2022. [Online]. Available: https://openreview.net/forum?id=6H2pBoPtm0s
[50] M. Wieczorek, B. Rychalska, and J. Dabrowski, “On the unreasonable effectiveness of centroids in image retrieval,” in Neural Information Processing: 28th International Conference, ICONIP 2021, Sanur, Bali, Indonesia, December 8–12, 2021, Proceedings, Part IV. Berlin, Heidelberg: Springer-Verlag, 2021, pp. 212–223. [Online]. Available: https://doi.org/10.1007/978-3-030-92273-3_18
[51] J. Liu, A. Shahroudy, M. Perez, G. Wang, L.-Y. Duan, and A. C. Kot, “NTU RGB+D 120: A large-scale benchmark for 3D human activity understanding,” IEEE Transactions on Pattern Analysis and Machine Intelligence, vol. 42, no. 10, pp. 2684–2701, 2019.
[52] H. Duan, J. Wang, K. Chen, and D. Lin, “PYSKL: Towards good practices for skeleton action recognition,” 2022. [Online]. Available: https://arxiv.org/abs/2205.09443
[53] W. Liu, D. Lian, W. Luo, and S. Gao, “Future frame prediction for anomaly detection – a new baseline,” in 2018 IEEE Conference on Computer Vision and Pattern Recognition (CVPR), 2018.
[54] L. Wang, D. Q. Huynh, and P. Koniusz, “A comparative review of recent Kinect-based action recognition algorithms,” IEEE Transactions on Image Processing, vol. 29, pp. 15–28, 2020.

BIOGRAPHY

Armin Danesh Pazho (S’22) is currently a Ph.D. student at the University of North Carolina at Charlotte, NC, United States. With a focus on Artificial Intelligence, Computer Vision, and Deep Learning, his research centers on developing AI for practical, real-world applications and on the challenges and requirements inherent in these fields. Specifically, his research covers action recognition, anomaly detection, person re-identification, human pose estimation, and path prediction.

Christopher Neff (S’18) is a National Science Foundation Graduate Research Fellow and Doctoral Candidate at the University of North Carolina at Charlotte. His dissertation focuses on tackling the challenges of bringing human-centric computer vision to real-world applications. His previous work covers person re-identification, human pose estimation, action recognition, real-time system development, lightweight algorithms, noisy data, domain shift, and real-world applications.
Ghazal Alinezhad Noghre (S’22) is currently pursuing her Ph.D. in Electrical and Computer Engineering at the University of North Carolina at Charlotte, NC, United States. Her research concentrates on Artificial Intelligence, Machine Learning, and Computer Vision. She is particularly interested in the applications of anomaly detection, action recognition, and path prediction in real-world environments, and in the challenges associated with these fields.

Babak Rahimi Ardabili is a Ph.D. student in the Public Policy Analysis program at the University of North Carolina at Charlotte, United States. His main research area is policymaking for emerging technologies. He focuses on the intersection of Artificial Intelligence and policy from a privacy perspective and on the challenges of bringing the technology to the community.

Shanle Yao is an Electrical Engineering graduate student at the University of North Carolina at Charlotte. His dissertation focuses on optimizing the performance and throughput of computer vision pipelines. His areas of interest include object detection, multiple object tracking, human pose estimation, semantic segmentation, and real-world applications.

Mohammadreza Baharani is an ML researcher and edge system deployment engineer at ForesightCares. He received his Ph.D. in computer engineering in 2021 from the University of North Carolina at Charlotte, USA, and was a postdoctoral researcher at the TeCSAR Lab. His research focuses on the intersection of computer architecture engineering and machine learning, with the goal of enabling AI algorithms on edge devices to have a positive impact in fields such as healthcare.
Hamed Tabkhi (S’07–M’14) is an Associate Professor in the Department of Electrical and Computer Engineering, University of North Carolina at Charlotte, USA. He was a post-doctoral research associate at Northeastern University, where he received his Ph.D. degree in 2014 under the direction of Prof. Gunar Schirner. His research focuses on transformative computer systems and architecture for cyber-physical, real-time streaming, and emerging machine learning applications.