diff --git "a/DtFQT4oBgHgl3EQfPzZf/content/tmp_files/load_file.txt" "b/DtFQT4oBgHgl3EQfPzZf/content/tmp_files/load_file.txt"
new file mode 100644--- /dev/null
+++ "b/DtFQT4oBgHgl3EQfPzZf/content/tmp_files/load_file.txt"
@@ -0,0 +1,1444 @@
+filepath=/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf,len=1443
+page_content='WebUI: A Dataset for Enhancing Visual UI Understanding with Web Semantics Jason Wu HCI Institute, Carnegie Mellon University Pittsburgh, PA, USA jsonwu@cmu.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='edu Siyan Wang Wellesley College Wellesley, MA, USA sw1@wellesley.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='edu Siman Shen Grinnell College Grinnell, IA, USA shenlisa@grinnell.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='edu Yi-Hao Peng HCI Institute, Carnegie Mellon University Pittsburgh, PA, USA yihaop@cs.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='cmu.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='edu Jeffrey Nichols Snooty Bird LLC USA jwnichls@gmail.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='com Jeffrey P.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Bigham HCI Institute, Carnegie Mellon University Pittsburgh, PA, USA jbigham@cs.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='cmu.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='edu ABSTRACT Modeling user interfaces (UIs) from visual information allows sys- tems to make inferences about the functionality and semantics needed to support use cases in accessibility, app automation, and testing.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Current datasets for training machine learning models are limited in size due to the costly and time-consuming process of manually collecting and annotating UIs.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' We crawled the web to construct WebUI, a large dataset of 400,000 rendered web pages associated with automatically extracted metadata.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' We analyze the composition of WebUI and show that while automatically extracted data is noisy, most examples meet basic criteria for visual UI mod- eling.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' We applied several strategies for incorporating semantics found in web pages to increase the performance of visual UI un- derstanding models in the mobile domain, where less labeled data is available: (i) element detection, (ii) screen classification and (iii) screen similarity.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' KEYWORDS Dataset;' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' UI Modeling;' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Computer Vision;' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Transfer Learning;' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Web Semantics;' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Computational Interaction ACM Reference Format: Jason Wu, Siyan Wang, Siman Shen, Yi-Hao Peng, Jeffrey Nichols, and Jeffrey P.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Bigham.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 2023.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' WebUI: A Dataset for Enhancing Visual UI Understanding with Web Semantics.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' In Proceedings of the 2023 CHI Conference on Human Fac- tors in Computing Systems (CHI ’23), April 23–28, 2023, Hamburg, Germany.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' ACM, New York, NY, USA, 14 pages.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' https://doi.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='org/10.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='1145/3544548.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='3581158 1 INTRODUCTION Computational modeling of user interfaces (UIs) allows us to under- stand design decisions [15, 28], improve their accessibility [55], and automate their usage [7, 31, 32].' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Often, these systems must interact with UIs in environments with incomplete or missing metadata (e.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='g.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=', Permission to make digital or hard copies of part or all of this work for personal or classroom use is granted without fee provided that copies are not made or distributed for profit or commercial advantage and that copies bear this notice and the full citation on the first page.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Copyrights for third-party components of this work must be honored.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' For all other uses, contact the owner/author(s).' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' CHI ’23, April 23–28, 2023, Hamburg, Germany © 2023 Copyright held by the owner/author(s).' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' ACM ISBN 978-1-4503-9421-5/23/04.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' https://doi.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='org/10.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='1145/3544548.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='3581158 mobile apps authored with inaccessible UI toolkits).' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' This presents many challenges since it necessitates that they reliably identify and reason about the functionality of the UI to support downstream applications.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Visual modeling of UIs, which has shown to be a promising solution, predicts information directly from a screen- shot using machine learning models and introduces no additional dependencies.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Building the datasets needed to train accurate visual models involves collecting a large number of screenshots paired with their underlying semantic or structural representations.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Recent efforts to collect datasets [15, 55] for data-driven modeling have focused on mobile apps, which are typically manually crawled and annotated by crowdworkers since they are often difficult to automate.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' This process is both time-consuming and expensive — prior work has estimated that collecting a dataset of 72,000 app screens from 10,000 apps took 5 months and cost $20,000 [15].' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Because of this, datasets for visual UI modeling are limited in size and can be prohibitively expensive to keep updated.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' The web presents a possible solution to UI data scarcity since web pages are a promising source of data to bootstrap and enhance visual UI understanding.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' In contrast to mobile UIs, web UIs (i.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='e.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=', web pages) are much easier to crawl since they are authored in a unified parsable language (i.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='e.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=', HTML) that typically exposes semantics (e.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='g.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=', links and listeners) necessary for automated navigation.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' The same web page can also be viewed in many different viewports and display settings, which makes it possible to collect a large dataset of UIs rendered on a variety of devices (e.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='g.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=', a smartphone or tablet).' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' In addition, web browsers offer several facilities to extract visual, semantic, and stylistic information programmatically.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' In particular, web conventions, such as the semantic HTML and the ARIA initiatives, while not always adopted, constitute a large, if potentially noisy, source of annotations for UI elements.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Finally, the web offers a virtually unlimited supply of data and has already been employed as a data source for large-scale machine learning [23, 52, 53].' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' We explore the possibility of automatically collecting and labeling a large dataset of web UIs to support visual UI modeling in other domains (e.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='g.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=', mobile).' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Compared to previous web datasets [28], our dataset is much larger, more recent, and contains semantic information needed to support common visual UI understanding tasks.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' arXiv:2301.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='13280v1 [cs.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='HC] 30 Jan 2023 CHI ’23, April 23–28, 2023, Hamburg, Germany Wu et al.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' In this paper, we show that a large dataset of automatically collected web pages can improve the performance of visual UI Understanding models through transfer learning techniques, and we verify this phenomenon for three tasks.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' We first describe the platform that we built to crawl websites automatically and scrape relevant visual, semantic, and style data.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Our crawler visited a total of approximately 400,000 web pages using different simulated devices.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' WebUI, the resulting dataset is an order of magnitude larger than other publicly available datasets [28].' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Next, we analyzed our dataset’s composition and estimated data quality using several automated metrics: (i) element size, (ii) element occlusion, and (iii) layout responsiveness.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' We found that most websites met basic criteria for visual UI modeling.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Finally, we propose a framework for incorporating web semantics to enhance the performance of existing visual UI understanding approaches.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' We apply it to three tasks in the literature: (i) element detection, (ii) screen classification and (iii) video screen similarity and show that incorporating web data improves performance in other target domains, even when labels are unavailable.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' To summarize, our paper makes the following contributions: (1) The WebUI dataset, which consists of 400,000 web pages each accessed with multiple simulated devices.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' We collected WebUI using automated web crawling and automatically associated web pages with visual, semantic, and stylistic information that can generalize to UIs of other platforms.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' (2) An analyis of the composition and quality of examples in WebUI for visual UI modeling in terms of (i) element size, (ii) element occlusion, and (iii) website layout responsiveness.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' (3) A demonstration of the usefulness of the WebUI dataset through three applications from the literature: (i) element detection, (ii) screen classification and (iii) screen similarity.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' We show that incorporating web data can lead to perfor- mance improvements when used in a transfer learning set- ting, and we verified its improvement for our three tasks.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' We envision that similar approaches can be used for other tasks common in visual UI understanding.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Furthermore, we show that models trained on only web data can often be directly applied to other domains (e.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='g.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=', Android app screens).' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' All code, models, and data will be released to the public to encourage further research in this area.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 2 RELATED WORK 2.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='1 Datasets for UI Modeling There have been several datasets collected to support UI modeling, mostly in the mobile domain.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Several datasets have been collected to support training specialized models [26, 40, 44] .' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' The AMP dataset consists of 77k screens from 4,068 iOS apps and was originally used to train Screen Recognition, an enhanced screen reader [55], but has also been extended with additional pairwise annotations to support automated crawling applications [20].' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' The largest publicly available dataset Rico, which consists of 72K app screens from 9.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='7K Android apps, was collected using a combination of automated and human crawling [15].' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' It captures aspects of user interfaces that are static (e.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='g.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=', app screenshots) and dynamic (e.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='g.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=', animations and user interaction traces).' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Rico has served as the primary source of data for much UI understanding research and it has been extended and re-labeled to support many downstream applications, such as natural language interaction [7, 32, 49] and UI retrieval for design [6, 15].' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Nevertheless, Rico has several weaknesses [14].' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Several works have identified labeling errors and noise (e.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='g.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=', nodes in the view hierarchy do not match up with the screenshot).' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' To this end, efforts have been made to repair and filter examples.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Enrico first randomly sampled 10,000 examples from Rico then cleaned and provided additional annotations for 1460 of them [29].' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' The VINS dataset [6] is a dataset for UI element detection that was created by collecting and manually taking screenshots from several sources, including Rico.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' The Clay dataset (60K app screens) was generated by denoising Rico through a pipeline of automated machine learning models and human annotators to provide element labels [30].' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Rico and other manually annotated datasets are expensive to create and update, and thus, models trained on them may exhibit degraded performance on newer design guidelines (e.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='g.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=', Material Design is an updated design look for Android).' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' For example, Rico was collected in early 2017 and has yet to see any update.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Finally, many of these datasets focus on one particular platform (e.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='g.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=', mobile phone) and therefore may learn visual patterns specific to the screen dimensions.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' For example, “hamburger menus” are usually used in mobile apps while desktop apps may use navigation bars.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' In our work, we scrape the web for examples of UIs, which addresses some drawbacks (high cost, difficult to update, device- dependent) of current datasets but not others (dataset noise).' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' The closest to our work is Webzeitgeist [28], which also used automated crawling to mine the design of web pages.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' To support design mining and machine learning applications, Webzeitgeist crawled 103,744 webpages and associated web elements with extracted properties such as HTML tag, size, font, and color.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' This work is primarily used for data-driven design applications and does not attempt to transfer semantics to other domains.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' We also collect multiple views of each website and query the browser for accessibility metadata, which can further facilitate UI modeling applications.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 2.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='2 Applications of UI Datasets Applications that operate and improve existing UIs must reliably identify their composition and functionality.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Originally, many relied on pixel-based or heuristic matching [1, 18, 43, 54].' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' The introduc- tion of large UI datasets, such as those previously discussed, have provided the opportunity to learn more robust computational mod- els, especially those from visual data.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' The goal of this paper is to improve the performance of these computational models by lever- aging a large body of web data and its associated semantics.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' There have been many efforts to learn the semantics of UIs [37, 49, 50].' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' In this paper, we focus on three modeling tasks at the (i) element (ele- ment detection), (ii) screen (screen classification), and (iii) app-level (screen similarity).' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Element detection identifies the location and type of UI widgets from a screenshot and has applications in accessibility metadata repair [55], design search [6], and software testing [12, 51].' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Labeled datasets for element detection exist [6, 15, 30, 55];' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' however they are quite small compared to other datasets for object detection [36] which contain an order of magnitude more examples (330K).' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' We found that incorporating our web UI dataset (400K examples) in a WebUI: A Dataset for Enhancing Visual UI Understanding with Web Semantics CHI ’23, April 23–28, 2023, Hamburg, Germany pre-training phase led to performance benefits.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Other work involves modeling UIs at a higher level (e.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='g.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=', screen-level) to reason about the design categorization [29] and purpose [49] of a screen.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Similarly, datasets with screen-level annotations of UIs are much smaller than others used in the CV literature [17] so we used additional web data to improve accuracy.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Finally, we investigated screen similarity, a task that reasons about multiple UI inputs (e.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='g.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=', frames of a video recording), where no publicly available labeled data exists.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' We found that models trained on related web semantics (e.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='g.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=', URL similarity) were able to successfully generalize to mobile screens.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' In summary, our paper shows that applying examples from the web and relevant machine learning techniques can improve the performance of computational models that depend on UI data.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 2.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='3 Related Machine Learning Approaches We briefly introduce and summarize three machine learning ap- proaches that we apply in our paper.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Broadly, they fall under a body of research known as “transfer learning” which uses knowledge from learning one task (e.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='g.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=', web pages) to improve performance on another (e.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='g.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=', mobile app screens).' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Inductive transfer learning is a technique that improves model performance by first “pre-training” a model on a related task, typi- cally where a lot of data is available [42].' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Once the model converges on the first task, its weights are used as a starting point when train- ing on the target task.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Labeled data is required for both the source and target domains, although it is possible that there are fewer target examples.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' In some cases, labeled data are missing for either the source or target domains.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' If source labels are unavailable, semi-supervised learning (SSL) can be applied to take advantage of unlabeled data to improve performance [9].' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' For example, WebUI doesn’t contain any labels for screen type (e.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='g.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=', login screen, register screen), but we’d like to use it to improve prediction accuracy on a small number of annotated Android app screens.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' In our work, we apply a form of SSL known as “self-learning” [9], where a UI classification model it- eratively improves its performance by generating pseudo-labels for an unlabeled dataset, then re-training itself using high-confidence samples.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Finally, to support use-cases where target labels are unavailable, we apply unsupervised domain adaptation (UDA) [22].' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' In many cases, visual UI models trained on web data can be directly used on any screenshot (including Android and iOS apps), and UDA improves the performance and robustness of models to domain changes.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' This type of knowledge transfer is particularly interesting because it enables us to explore the feasibility of new UI under- standing tasks (without manually annotating a large number of examples) and bring some benefits of web semantics (e.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='g.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=', semantic HTML) to other platforms.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 3 WEBUI DATASET We introduce the WebUI dataset, which we construct and release to support UI modeling.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' The WebUI dataset is composed of 400,000 web pages automatically crawled from the web.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' We stored screen- shots and corresponding metadata from the browser engine, which serve as annotations of UI element semantics.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Because the collec- tion process is highly automated, our final dataset is an order of Database Crawling Coordinator Crawler Web workers assign URLs to worker send back crawled URLs Request and collect data Figure 1: Overview of our crawling architecture.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' A crawl- ing coordinator contains a queue of URLs to crawl and as- signs them to workers in a crawler pool.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Workers asyn- chronously process URLs by visiting them in a automated browser, scraping relevant metadata, then uploading them to a cloud database.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' magnitude larger than other publicly available ones (Figure 4) and can be more easily updated over time.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' In this section, we give an overview of our web crawling architec- ture, analyze the composition of our dataset, and provide evidence that it can support visual UI modeling for other platforms.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 3.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='1 Web UI Crawler 3.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='1.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='1 Crawling Architecture.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' To collect our dataset, we implemented a parallelizable cloud-based web crawler.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Our crawler consists of (i) a crawling coordinator server that keeps track of visited and queued URLs, (ii) a pool of crawler workers that scrapes URLs using a headless browser, and (iii) a database service that stores uploaded artifacts from the workers.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' The crawler worker is implemented using a headless framework [3] for interfacing with the Chrome browser.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Each crawler worker repeatedly requests a URL from the coordinator server, which keeps global data structures for visited and upcoming URLs.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' The crawler worker includes some simple heuristics to automatically dismiss certain types of popups (e.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='g.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=', GDPR cookie warnings) to help it access page content.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' We seeded our coordinator using a list of websites that we hy- pothesized would lead to diverse examples of web pages (e.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='g.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=', link aggregation websites and design blogs) and ones that we expected to have high-quality accessibility metadata (e.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='g.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=', government websites).' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' A full list of our seed websites can be found in the supplementary materials.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' We explored several crawling policies and eventually settled on one that encourages diverse exploration by inversely weighting the probability of visiting a URL by its similarity to the visited set.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' For example, if the crawler previously visited http://example.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='com/user/ alpha, it would be less likely to subsequently visit http://example.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' com/user/beta.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' We set a minimum probability so that it is possible to re-visit links to support additional types of analysis (e.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='g.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=', temporal changes).' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' The coordinator organizes upcoming (i.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='e.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=', queued) URLs by their hostname, (i) selects a hostname randomly with uniform probability, and then (ii) selects a URL using its assigned probability.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Empirically, we found this technique to be effective at avoiding CHI ’23, April 23–28, 2023, Hamburg, Germany Wu et al.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 1280x720 1366x768 1536x864 1920x1080 iPhone iPad Figure 2: Screenshots from a web page accessed using 6 dif- ferent devices: 4 desktop resolutions, a smartphone, and a tablet.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' By requesting a responsive web page at different reso- lutions, we induce several layout variations (e.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='g.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=', navigation and hero button).' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' crawler traps, which are websites that cause automated crawlers to get stuck in endless loops navigating within the same site.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 3.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='1.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='2 Data Collected from a Web Page.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' We used a pool of crawler workers to crawl web pages in parallel, and we visited each URL with multiple simulated devices.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' We collected several types of se- mantic information by querying the rendering and accessibility engine.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' We set a timeout limit of 6 minutes for each URL, so some web pages were not visited by all simulated devices.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Simulated Devices.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' We sampled each web page with 6 sim- ulated devices: 4 of the most common desktop resolutions [4], a tablet, and a mobile phone.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Devices are simulated by setting the browser window resolution and user agent to match the goal device, both of which may affect the page’s content and rendering.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Screenshots.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Our crawler worker captured two types of screen- shots (i.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='e.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=', visual data) from websites.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' We captured a viewport screenshot, with fixed image dimensions, and a full-page screenshot, with variable height.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Images were saved using lossy compression to save storage.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' While compression can introduce some artifacts, previous work [19] suggests that the effect on deep learning model performance is minimal.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Accessibility Tree.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' We used a browser automation library to query Chrome’s developer tools to retrieve an accessibility tree for each page [2].' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' The accessibility tree is a tree-based represen- tation of a web page that is shown to assistive technology, such as screen readers.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' The tree contains accessibility objects, which usually correspond to UI elements and can be queried for properties (e.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='g.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=', clickability, headings).' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Compared to the DOM tree, the accessibility tree is simplified by removing redundant nodes (e.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='g.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=',
tags that are only used for styling) and automatically populated with semantic information via associated ARIA attributes or inferred from the node’s contents.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' The browser generates the accessibility tree using a combination of HTML tags, ARIA attributes, and event listeners (e.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='g.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=', click handlers) to create a more consistent semantic representation of the UI.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' For instance, there are multiple ways to create a button (e.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='g.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=', a styled div) and the accessibility tree is intended to unify all of these to a single button tag.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Layout and Computed Style.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' For each element in the accessi- bility tree, we stored layout information from the rendering engine.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Specifically, we retrieved 4 bounding boxes relevant to the “box model”: (i) the content bounding box, (ii) the padding bounding # of elements (in thousands) 0 25000 50000 75000 100000 125000 text link list item image heading paragraph line break generic grid cell button Frequency of Common Element Types Figure 3: 10 most common element types in the WebUI dataset.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Element types are based on automatically computed roles, which are not mutually exclusive.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Text is the most common type, but many types offer semantic information about what text is used for e.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='g, a heading, paragraph or link.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' # of UIs 0 100,000 200,000 300,000 400,000 500,000 Enrico VINS Clay Rico Screen Recognition Webzeitgeist WebUI UI Dataset Size Figure 4: Comparison of WebUI to existing UI datasets.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' We- bUI contains nearly 400,000 web pages and is nearly one or- der of magnitude larger than existing datasets available for download (Enrico, VINS, Clay, Rico).' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Each web page also con- tains multiple screenshots captured using 6 simulated de- vices.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' box, (iii) the border bounding box, and (iv) the margin bounding box.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Each element was also associated with its computed style in- formation, which included font size, background color and other CSS properties.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 3.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='2 Dataset Composition The WebUI dataset contains 400K web UIs captured over a period of 3 months and cost about $500 to crawl.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' We grouped web pages together by their domain name, then generated training (70%), validation (10%), and testing (20%) splits.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' This ensured that similar pages from the same website must appear in the same split.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' We created four versions of the training dataset.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Three of these splits were generated by randomly sampling a subset of the training split: Web-7k, Web-70k, Web-350k.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' We chose 70k as a baseline size, since it is approximately the size of existing UI datasets [15, 55].' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' We also generated an additional split (Web-7k-Resampled) to provide a small, higher quality split for experimentation.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Web-7k-Resampled was generated using a class-balancing sampling technique, and we removed screens with possible visual defects (e.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='g.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=', very small, occluded, or invisible elements).' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' More information about how this set was generated can be found in the appendix.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' The validation and test split was always kept the same.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 3.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='2.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='1 Comparison to Existing Datasets.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' WebUI is an order of magni- tude larger than existing datasets used for UI understanding (Figure 4) and provides rich semantic and style information not found in mobile datasets.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' WebUI focuses on the static properties of web pages and does not store page loading times or element animations.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 业Libera.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Chat About Contribute ChannelNamespaces Guides FAQ Connect Providinga communityplatform forFreeand open- source software and peer directed projects.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Connectby pointingyour IRC clientto irc.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='libera.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='chat:6697 (TLS) Choosing an IRC client Libera.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Chat Channel Namespaces Happy Birthday, Libera Chat!' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 19thMay2022byLiberastaff Helloeveryone,todaywe celebratethe anniversary of Libera.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Chat going public!' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='业Libera.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Chat About Contribute Channel Namespaces Guides FAQ Connect Providing a community platform for free and open- source software and peer directed projects.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Connect by pointing your IRC client to irc.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='libera.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='chat:6697 (TLS) Choosing anIRC client Libera.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Chat Channel Namespaces Happy Birthday, Libera Chat!' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 19thMay 2022 by Libera staff Hello everyone, today we celebrate the anniversary of Libera.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Chat going public!' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Wherewearecomingfrom Exactly one year ago Libera.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Chat was unveiled as a real time communication and collaboration servicefor freeandopen-sourcesoftware,peer-directed projects, openly licensed content and collaboration.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Starting from scratch wemanaged, just within a fewmonths, tobecome the largest IRC network.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='业Libera.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Chat About Contribute Channel Namespaces Guides FAQ Connect Providinga communityplatform forFreeandopen- source softwareandpeer directedprojects.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Connect bypointingyour IRC clientto irc.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='libera.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='chat:6697 (TLS) Choosing an IRC client Libera.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Chat Channel Namespaces Happy Birthday, Libera Chat!' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 19thMay2022by Libera staff Hello everyone, today we celebrate the anniversary of Libera.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Chat going public!' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Wherewearecomingfrom业Libera.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Chat About Contribute Channel Namespaces Guides FAQ Connect Providing a community platform For Free and open source software and peer directed projects.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Connect by pointing your IRC client to irc.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='libera.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='chat:6697 (TLS) Choosing an IRC client Libera.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Chat Channel Namespaces Happy Birthday, Libera Chat!' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 19th May 2022 by Libera staff Hello everyone, today we celebrate the anniversary of Libera.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Chat going public!' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Wherewearecomingfrom Exactly one year ago Libera.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Chat was unveiled as a real time communication and collaboration service for free and open-source software, peer-directed projects, openly licensed content and collaboration.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Starting from scratch we managed, just within a fewmonths, to become the largestIRC network.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Starting from scratch, we managed to gain around 5o o00 users in just a month and a half, a number which has been mostly steady since.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' with regard to channels we had roughly 15 00o channels formed within half a month, compared to the usercount this number is still growing, but the curve Flattened itself a bit.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' You can see detailed graphs over at https://netsplit.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='de/networks/statistics.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='php?' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='net=Libera.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Chat We also saw many communities and projects migrating over to Libera from other places in the first few days, counting 250 in just one week and 500 after a monthWLibera.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Chat Navigation Libera.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Chat Providing a community platform For Free and open-source software and peer directed projects.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Connect by pointing your IRC client to irc.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='libera.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='chat:6697 (TLS) ChoosinganIRC client Channel Namespaces Happy Birthday, Libera Chat!' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 19th May 2022 by Libera staffLibera.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Chat About Contribute Channel Namespaces Guides FAQ Connect Providing a community platform for Free and open- source software and peer directed projects.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Connect by pointing your IRC client to irc.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='libera.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='chat:6697 (TLS) Choosing an IRC client Libera.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Chat Channel Namespaces Happy Birthday, Libera Chat!' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 19th May 2022 by Libera staff Hello everyone, today we celebrate the anniversary of Libera.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Chat going public!' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Where we are coming from Exactly one year ago Libera.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Chat was unveiled as a real time communication and collaboration service for Free and open-source software, peer-directed projects, openly licensed content and collaboration.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Starting From scratch we managed, just withinafewmonths,tobecomethelargestIRCnetwork.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Starting From scratch,we managed to gain around 5oooo users in just amonth and a half, a number which has been mostly steady since.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' With regard to channels we had roughly 15ooo channels formed within half a month, compared to the usercount this number is still growing,but the curve flattened itself a bit.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='You can see detailed graphs over at https://netsplit.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='de/networks/statistics.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='php?' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='net=Libera.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Chat We also saw many communities and projects migrating overto Libera from other places in the first fewdays, counting 25o in just one week and5o0 aftera month.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Today we are hosting roughly 95o projects and communities, and that number is still growing.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' We are hoping to reach the 1oooth registration soon!' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' All these communites are quitediverse.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Libera.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Chat services arenot onlyused by major Free/open source operating systems and well known,world wide operating institutions such as the Wikimedia Foundation;' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' we also have local Linux User Groups, the hackspace around the corner hacking on whimsical gadgets and liberating your hardware or someones scratch-your-own-itch image viewer that call Libera.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Chat their home.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='WebUI: A Dataset for Enhancing Visual UI Understanding with Web Semantics CHI ’23, April 23–28, 2023, Hamburg, Germany We analyzed the makeup of web UIs and compared them to mobile UIs.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' The distribution of UI types (e.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='g.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Login, News, Search) in WebUI are also likely to be different than mobile data, since many web pages are primarily hypertext documents.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' We extracted elements from the accessibility tree and categorized them using their computed accessibility role and the role of any singleton parents.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' For example, a clickable image is created in HTML by surrounding an image (
![]()
) element with an anchor element (
).' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Thus, it is possible for elements to be assigned to multiple classes.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Figure 3 shows the frequency of element types in our dataset.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Similar to prior work [55], we find that text is the most common element in our dataset.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' However, we find limited overlap between the rest of the label set, possibly due to the nature of web data and the mutually exclusive nature of existing label sets.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' On average, there were 60 elements on a web UI, 30 of which were visible in the viewport.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' This is more than the number of elements on mobile app screens, which prior work estimated to be around 25 per screen, although this may in part be due to differences in segmentation (e.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='g.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=', a single Rich Text Field on Android can contain differently formatted text while on HTML they would broken up into different tags).' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' On average, there were also more clickable elements per web page (20 on web pages vs 15 “interactable" elements on Android apps), likely due to the prevalence of hyperlinks on the web.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 3.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='2.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='2 Dataset Quality.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Compared to manually labeled examples, automatically extracted annotations can contain errors that impact modeling performance.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' We conducted an analysis on a small, ran- domly sampled data from our dataset (1000 web pages).' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' While there are numerous possible defects, we focus on three that we believe are most relevant to data quality: (i) element size, (ii) element occlusion, and (iii) website responsiveness.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Our analysis is primarily focused on quantifying possible defects but not reparing them.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Previous work [30, 44] has explored automated methods for correcting mis- matched labels and occluded elements, and we expect the overall quality of WebUI could be improved if these were applied.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='. Element Size.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Element size refers to the dimensions of an anno- tated object in an image.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' For example, if a bounding box annotation surrounds an object that is too small relative to the image resolution, it may be difficult for a model to identify the object.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' The average area of bounding boxes in our data is approximately 14000𝑝𝑥2, but this may have been influenced by short segments of text.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' The Web Content Accessibility Guidelines (WCAG) guideline for target size also recommends that interactable elements have a minimum size of 44 by 44 pixels, so that they can be easily selected by users.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' In our dataset, one third of interactable elements (e.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='g.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=', elements tagged as links or button) were smaller than this threshold.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Element Occlusion.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Element occlusion occurs when one object partially or completely covers another in a screenshot.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Occluded el- ements are detrimental to visual modeling since they may represent targets that can be impossible to predict correctly.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' We quantified the occlusion rate by counting the number of screens with overlapping leaf elements.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' We found that 18% of screens in our sampled split contained overlapping leaf elements.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' However, of the overlapping elements, only a third of them were occluded by more than 20% of their total area.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Responsive Websites.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Website responsiveness relates to how well a web page adapts to different screen viewports.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Since we sim- ulated multiple devices for each web page, responsive websites are likely to produce more variation in their layouts than unresponsive ones.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' To measure responsiveness, we automatically computed met- rics included in the Chrome Lighthouse tool for estimating layout responsiveness: (i) responsiveness of content width to window size and (ii) the use of a viewport meta tag, which is needed for proper mobile rendering.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' From our analysis we found that 70% and 80% of processed web pages met the first, and second criteria, respectively.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' In summary, our analysis suggests that most web pages in our dataset meet some basic requirements for visual UI modeling.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Given the reliance of our data collection on extracted accessibility meta- data, we expect high quality examples to adhere to good accessibility practices, such as those outlined by WCAG.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' However, considering the inaccessibility of the web and that many criteria are difficult to verify automatically, we also expect many web pages to vio- late some of these criteria.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' There are other desirable properties for dataset quality that we did not check, e.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='g.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=', the accurate use of se- mantic HTML tags, ARIA tags, and tightness of element bounding boxes.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' These properties were harder to verify automatically, since they require knowledge of developer intention and associated tasks.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' In our analysis, we only attempt to identify possible defects, and we did not attempt to remove or repair samples.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' This could be a direction for future work to improve dataset quality [8, 30].' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 4 TRANSFERRING SEMANTICS FROM WEB DATA We hypothesized that web data is similar and relevant to modeling other types of UIs from their pixels.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' In this paper, we are specif- ically interested in the mobile domain, as mobile apps often lack metadata and can only be reliably understood from their visual appearance.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' In many cases, manually-annotated mobile datasets are small, and in some cases, labels are completely unavailable.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' We used transfer learning to apply our dataset to three existing tasks in the UI understanding literature: (i) element detection, (ii) screen classification, and (iii) screen similarity.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Table 1 shows downstream applications where UI understanding tasks can benefit from web data.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Because each task contains different constraints (e.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='g.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=', presence of labeled target data) it is difficult to apply a single strategy to serve all use-cases.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' For example, inductive transfer learning typi- cally requires labels in both the pre-training and fine-tuning phase is impossible to apply to a setting where target labels are unavail- able (e.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='g.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=', screen similarity).' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' We expect our three transfer learning strategies to be applicable to most future use-cases, since they span all combinations of labeled data availability (Table 1).' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 4.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='1 Element Detection Element detection requires a machine learning model to identify the locations and types of UI elements from a screenshot.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Often these models are based on object detection frameworks.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Element detection is an example of a task where labeled data is available in both the source and target domain (albeit fewer exam- ples of mobile screens), so it is possible to employ inductive transfer learning.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' The WebUI dataset contains the locations of elements that we scraped from the website accessibility tree.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Element types are CHI ’23, April 23–28, 2023, Hamburg, Germany Wu et al.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Table 1: Table of strategies for transferring semantics from web pages to other types of UIs.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' We explored scenarios where labeled data is missing in either domain by applying three strategies: (i) finetuning, (ii) semi-supervised learning, and (iii) domain adaptation.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Approach Finetuning Semi-supervised Learning Domain Adaptation Application Element Detection Screen Classification Screen Similarity Web (Source) Y N Y Mobile (Target) Y Y N Web Data VINS Element Detector Element Detector Step 1: Pre-training Step 3: Fine-tuning Step 2: Weight initialization Figure 5: We applied inductive transfer learning to improve the performance of a element detection model.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' First, we pre- trained the model on web pages to predict the location of nodes in the accessibility tree.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Then, we used the weights of the web model to initialize the downstream model.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Finally, we fine-tuned the downstream model on a smaller dataset consisting of mobile app screens.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' inferred from the HTML tags and the ARIA labels [2].' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' We show that this training strategy results in improvements to element detection performance.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 4.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='1.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='1 Model Implementation.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' We primarily followed the details provided by VINS [6] to implement our element detection model.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' The VINS dataset, which we used for training, is composed of 4800 annotated UI screenshots from various sources such as design wireframes, Android apps, and iOS apps.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Since the authors did not release official data splits, we randomly partitioned the data into training (70%), validation (15%), and testing (15%) sets.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' This specific split ratio was chosen since it has been used in other UI modeling work [50].' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' The paper identifies 11 primary UI component classes;' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' however the released raw dataset includes a total of 22 class labels.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' For the extraneous labels, we either tried to merge them with the 11 primary labels (e.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='g.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=', “Remember Me" merged with “Check Box") or assigned them to an “Other" class (e.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='g.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=', “Map") if no good fit was found.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Instead of the SSD object detection model [38] used by VINS, we opted to start from the more recent FCOS model architecture [48], since we found it was easier to modify to support multi-label training.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Previous element detection work [6, 12, 55] trained models to assign one class label (e.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='g.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=', Button, Text field) to each detected element in the screenshot.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' To take advantage of multiple, nested definitions of web elements in our dataset, we trained the object detection model to predict multiple labels for each bounding box.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Figure 5 illustrates the overall training process.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' In the pre-training phase, the element detection model is trained on a split of the We- bUI dataset.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Due to cost and time constraints, we trained all element detection models for a maximum of 5 days.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' We also used early stop- ping on the validation metric to reduce the chance of overfitting.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Afterwards, a specific part of the model was re-initialized (the ob- ject classification head) to match the number of classes in the VINS dataset before it was fine-tuned.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' We found it difficult to modify the original SSD architecture to support the multi-label pre-training, so we only followed the original training from scratch procedure described in the paper as a baseline.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 4.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='1.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='2 Results.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Table 2 shows the performance of each model con- figuration on the VINS test set, and we show that our updated configurations lead to significant performance improvements.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Our primary performance metric for this task was the mean average pre- cision (mAP), which is a standard metric used for object detection models that takes into account the accuracy of bounding box location (i.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='e.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=', how closely the predicted box overlaps with ground truth) and clas- sification (prediction of object type).' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' The mAP score is calculated by computing an individual average precision (AP) score for each possible element class (e.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='g.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=', Text, Check Box), which represents the object detector’s accuracy in detecting each object class.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' The AP scores are averaged to produce the mAP score.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' We calculated the mAP score over classes that could be mapped to the original label set in the paper [6] i.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='e.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=', we excluded the “Other" class where there was no clear mapping to the original set.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' We calculated the un- weighted mean between class APs, which assigns equal importance to common and rare element types.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Our best model configuration performed 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='14 better than the baseline in terms of mAP score.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' While the largest source of improvement over the baseline con- figuration (SSD) came from the updated FCOS model architecture, our fine-tuning procedure contributed to gains as well.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Specifi- cally, we note that pre-training with more examples led to better performance (around 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='04 mAP).' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Depending on the downstream application of the element detection model, this improvement could lead to better user experience but would require further validation.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' For example, a screen reader [55] does not require tight bounding boxes;' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' however, it would benefit from detecting more (small) el- ements on the screen.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Query-based design search [6] could also retrieve more relevant examples.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Although we followed the original training procedure as closely as possible, we were unable to reach the mAP score reported in the original VINS paper.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' This can be attributed to (i) our use of different randomized splits and (ii) differences in mappings between class labels from the raw data to the 11 primary classes, which were not provided in the previously released code.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Nevertheless, since we used the same splits and class mappings across all of our model configurations, we expect the relative performance improvements to be consistent.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' We also investigated the zero-shot performance of element de- tectors trained only on web data (i.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='e.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=', without fine-tuning).' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' It is difficult to compute performance quantitatively, since the label sets WebUI: A Dataset for Enhancing Visual UI Understanding with Web Semantics CHI ’23, April 23–28, 2023, Hamburg, Germany Table 2: Element detection performance (11 object classes) for different model configurations.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Pre-training on more web screens led to better performance on mobile screens af- ter fine-tuning.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Model Configuration mAP SSD (Random Init.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=') 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='6737 FCOS (Random Init.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=') 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='7739 FCOS (Pre-trained on Web7k) 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='7877 FCOS (Pre-trained on Web7k-Resampled) 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='7961 FCOS (Pre-trained on Web70k) 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='7921 FCOS (Pre-trained on Web350k) 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='8115 between the web and mobile datasets do not directly overlap.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' How- ever, we provide qualitative evidence that zero-shot learning could be successful.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Figure 6 shows the output of a web model when run on mobile app screens from Rico.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' We conducted minimal prepro- cessing, such as cropping out the Android system notification bar and the navigation soft buttons.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' In many cases, the web analogs of mobile text and image elements are detected accurately, which suggests that some element classes have consistent appearance across platforms.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Interestingly, some web classes such as links and headings are also detected in the image, which could be used to infer new semantics such as clickability [47] and navigation landmarks.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 4.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='2 Screen Classification Classifying screen type or functionality from a screenshot can be useful for design analysis and automation.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Previously, small amounts of data have been collected and annotated for this purpose.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Enrico [29] is an example of a dataset (1460 samples, subset of Rico [15]) where each screenshot is assigned to one of 20 mutually- exclusive design categories.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Because of the dataset’s small size, it is challenging to train accurate deep learning classification models.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' While our web dataset is large, it also does not have the screen- type annotations, and thus it is not possible to employ the same pre-training strategy that was used for element detection.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Instead, we applied a semi-supervised learning technique known as self-training [9].' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Self-training is a process that improves model performance by iteratively labeling and re-training on a large source of unlabeled data.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' We investigated the effects of using WebUI as the unlabeled dataset and show that doing so improves overall screen classification accuracy.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 4.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='2.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='1 Model Implementation.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Figure 7 shows our procedure for incorporating WebUI data into our model training via self-training.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' First, we trained a screen classifier based on the VGG-16 archi- tecture with batch normalization and dropout [45], as described by the Enrico paper [29].' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Since official training, validation, and testing splits were not provided, we randomly generated our own (70%/15%/15%).' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' This model was trained only on data from the Enrico training split and served as the teacher classifier.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Next, the teacher model was used to generate “soft" pseudo-labels for screenshots in the WebUI dataset, where each sample was mapped to a vector containing probabilities for each class.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' We followed the procedure used by Yalniz et al.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' [53] to keep only the top K most confident Table 3: Classification accuracy (across 20 classes) for dif- ferent configurations of our screen classification model.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' In- creasing the amount of data used with our semi-supervised learning method led to increased accuracy.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Model Configuration Accuracy VGG-16 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='4737 Noisy ResNet-50 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='4649 Noisy ResNet-50 (Rico) 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='4956 Noisy ResNet-50 (Web7k) 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='4864 Noisy ResNet-50 (Web7k-Resampled) 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='4868 Noisy ResNet-50 (Web70k) 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='5175 Noisy ResNet-50 (Web350k) 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='5263 labels for each class.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' To select K, we first randomly sampled a small subset of 1000 web pages from our dataset and performed a param- eter search to find the optimal value.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Based on our experiments, we found that a value of 10% of the total dataset size led to good performance (e.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='g.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=', we set K=700 for the Web-7k split).' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Finally, we trained a student classifier on a combination of the original and automatically generated labels.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' We employed a specific type of self-training known as Noisy Student Training [52], which involves injecting noise into the student model’s training process so that it becomes more robust.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Two types of noise are used in this process: (i) input noise, which is implemented via random data augmenta- tion techniques and (ii) model noise, which is implemented with dropout [46] and stochastic depth [27].' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Because stochastic depth can only be applied to model architectures with residual blocks, we used an architecture based on ResNet-50 [25] instead of VGG-16.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 4.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='2.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='2 Results.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Overall, we found that applying self-training to in- corporate additional unlabeled data led to consistent performance improvements (Table 3).' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' The best classifier using WebUI data was 5% more accurate than the baseline model, which was only trained with the Enrico dataset.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Our baseline VGG-16 model performed considerably worse than the originally reported results [29] but achieved similar accuracy to another reproduction of the work [35].' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' The performance difference could be attributed to differences in randomized splits.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Since we used the same splits across all condi- tions, we expect relative performance differences to be consistent.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' To investigate the effects of using a new model architecture, we also trained a Noisy ResNet-50 (architecture used by the student model) on the Enrico dataset.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' The resulting classifier performed relatively poorly (worse than the baseline model), since the modifications introduced (dropout and stochastic depth) require more data to train effectively.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' The primary source of improvement stems from the inclusion of additional unlabeled data during the training process, which led to a more generalizable student model.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' We observed that the small size of the Enrico dataset (1460 samples) quickly led to overfitting during training and limited overall performance.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Semi-supervised learning techniques, such as self-training, allow training on a much larger number of examples.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' We found that model accuracy improved when we incorporated more unlabeled examples, both from WebUI and Rico.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' CHI ’23, April 23–28, 2023, Hamburg, Germany Wu et al.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Figure 6: Output of our element detection models run on two app screens.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' In many cases, detections from our web-only model (Blue) coincide with ones from our fine-tuned model (Orange), which suggests some zero-shot transfer capabilities.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Predicted tags from the web-only model also provide additional metadata corresponding to clickability (link) and heading prediction (heading);' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' however, the predicted bounding boxes are often less tight than the fine-tuned model.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Enrico Web Data Teacher Classifier Student Classifier Step 1: Training Step 3: Noisy Training Step 2: Pseudo-labels Figure 7: We applied semi-supervised learning to boost screen classification performance using unlabeled web data.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' First, a teacher classifier is trained using a “gold" dataset of labeled mobile screens.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Then, the teacher classifier is used to generate a “silver" dataset of pseudo-labels by running it on a large, unlabeled data source (e.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='g.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=', web data).' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Finally, the “gold" and “silver" datasets are combined when training a student classifier, which is larger and regularized with noise to improve generalization.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' This process can be repeated;' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' however, we only perform one iteration.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 4.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='3 Screen Similarity Web Data Similarity Model RICO UI Similarity Mobile Examples Unsup.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Domain Adaptation Figure 8: We used unsupervised domain adaptation (UDA) to train a screen similarity model that predicts relationships between pairs of web pages and mobile app screens.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' The training uses web data to learn similarity between screen- shots using their associated URLs.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Unlabeled data from Rico is used to train a domain-adversarial network, which guides the main model to learn features that are transferable from web pages to mobile screens.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Identifying variations within the same screen and detecting tran- sitions to new screens are useful for replaying user interaction traces, processing bug reports [13], and automated app testing [33, 34].' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' To model these properties and understand how multiple screens from an application relate to each other, previous work [20, 34] has sought to differentiate between distinct UIs and varia- tions of the same UI.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' For example, the same checkout screen may appear different based on the number and types of products added to the cart.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Common screen interactions such as scrolling and in- teraction with expandable widgets (e.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='g.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=', menus, dialogs, keyboards, and notifications) may also alter the visual appearance of a screen.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Visual prediction reduces system reliance on accessibility metadata, which may be missing or incomplete, and further extends the ap- plications of these models, as they can process video recordings of user interactions (e.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='g.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=', reproducing bug reports) [5, 13].' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Previous work [20] opted to manually annotate a dataset of more than one thousand iPhone applications that were manually “crawled" by crowdworkers;' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' however, the dataset was not released to the public.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' As a weak source of annotation, we used web page URLs to automatically label page relations.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Since no labeled data is available in the mobile domain, we employed domain-adversarial network training [22], a type of unsupervised domain adaptation (UDA), to encourage the model to learn transferrable features from the web domain that might apply to the mobile domain.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Note that while it is possible to apply the semi-supervised learning strategy (which was used for the screen classification task) in reverse, it may be less effective, since the unlabeled dataset (mobile UIs) is smaller than the labeled dataset.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 4.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='3.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='1 Model Implementation.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' We followed previous work [20] and used a ResNet-18 [25] model trained as a siamese network [24].' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' The siamese network uses the same model to encode two inputs, then compares them in feature space (i.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='e.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=', their embeddings) to decide if they are different variations of the same UI screen.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Our approach is different from the method proposed by previous work [13], which img Weather Alert SevereThunderstorm inyour.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='city.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Text,heading Text,paragraph sendyouweatheralertsbasedon ooatio rowid img Text,heading vedlleAle Text AlertiLocatior Text,button DoneImage WeatherAlert SevereThunderstorm inyour.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='city.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Never miss a weather alert Text Wewouldalsoliketosendyouweatneralertsbasedona locationormultiplelocationsyouprovide Switch Text veatner Alers Text Text Icon ocatior anFanciscoA Text Button Doneimg,link,Text Text,heading Sun & Moon Text,heading Amino img Loving Pokemon sun and moon?' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Thenjointhiscommunity Text,link Sign Up Text Log In Text JuPyuuT .' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='+ I:.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='-I.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' vicend PrIvacvIPolcIIcon Image Text bun & Moon Text m Image Loving Pokemon sun and moon?' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content="Thenjointhiscommunity Text Button Sign Up 4x Text ainnok'dn6imm ermeof Service andPrivacvPoliciWebUI: A Dataset for Enhancing Visual UI Understanding with Web Semantics CHI ’23, April 23–28, 2023, Hamburg, Germany Table 4: Classification performance (same-screen vs new- screen) of our screen similarity models evaluated on pairs of screens from our web data." metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Performance increased when the model was trained on more data and slightly decreased when trained with the UDA objective.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Model Configuration F1-Score ResNet-18 (Web7k) 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='7097 ResNet-18 UDA (Web7k) 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='7184 ResNet-18 (Web7k-Resampled) 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='7368 ResNet-18 UDA (Web7k-Resampled) 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='7191 ResNet-18 (Web70k) 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='8222 ResNet-18 UDA (Web70k) 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='8193 ResNet-18 (Web350k) 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='9630 ResNet-18 UDA (Web350k) 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='9500 applies random data augmentations (e.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='g.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=', blurring, rotation, trans- lation) to screenshots to create same-screen pairs.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Instead, we ran- domly sampled pairs of screenshots from our web data for training, with balanced probability for same-screen and new-screen pairs.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Same-screen pairs were generated by finding screenshots with the same URL but accessed at different times or simulating page scrolls on a full-page screen capture by sliding a window vertically along the image.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Note that occasionally, simulated page scrolls and access- ing the same web page at different times still produced identical or nearly identical screenshots, so in our test set, we filtered these out using perceptual hashing.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Different-screen pairs were generated both by sampling screenshots from within the same domain but with different URL path, and by sampling screenshots from other domains.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' The domain-adversarial training process seeks to simultaneously accomplish two objectives: (i) learn an embedding space where two screenshots are from the same screen if their distance is less than a threshold, and (ii) learn an encoding function that applies to both the web and mobile domains.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' The first objective is related to the primary task of distinguishing same-screen pairs from new-screen pairs and is achieved with a pairwise margin-based loss [20].' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' The second objective aims to align the feature distributions of the two domains by maximizing the error rate of a domain classifier, which is a network that tries to classify whether a sample is from a web or mobile UI.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' For this task, we used only web page screenshots cap- tured on simulated smartphones, to make the domain classification objective more challenging.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 4.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='3.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='2 Results.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Since one of the assumptions of our problem is that labeled examples of same-screen and new-screen pairs are unavail- able for mobile apps, we used two alternative methods to evaluate our screen similarity model: (i) quantitative evaluation on labeled pairs of web screens and (ii) qualitative evaluation on a set of unla- beled Android interaction videos.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Table 4 shows the quantitative performance of our models evalu- ated on pairs of web pages from our dataset.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Overall, training with more data led to significantly better performance, an increase of over 20%.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' The inclusion of a domain adaptation objective sometimes led to a slight drop in classification performance since it introduces additional constraints in the learning process.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' We qualitatively eval- uated our model’s performance characteristics on mobile screens by using them to segment videos of mobile app interaction.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' We used a dataset of screen recordings of bug reproductions [13] for 6 open-source Android apps and applied our model by sequentially sampling frames from the video and evaluating whether a new screen was reached.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Note our sampling process differs from other previous work [7, 15] that segmented crawls at recording time us- ing accessibility metadata, because we do not have accessibility metadata corresponding to the previously collected recordings used in our analysis.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Figure 9 shows an example of a usage video pro- cessed by our model.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' While the web model was effective detecting some types of transitions that occurred in mobile apps, it was less effective at others, such as software keyboards and dialogs, which do not occur frequently in the WebUI dataset.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' We include more model-generated segmentations of the bug reproduction dataset in supplementary material.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' In this work, we applied unsupervised domain adaptation, which does not require any labels from the target domain.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Other domain adaptation strategies exist, and some are able to incorporate small amounts of labeled data, which we expect could improve the accu- racy of our model by contributing transition types unique to mobile apps.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 5 DISCUSSION 5.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='1 Performance Impact of Web Data Empirically, we showed that automatically crawled and annotated web pages, like those available in WebUI, can effectively support common visual modeling tasks for other domains (e.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='g.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=', mobile apps) through transfer learning strategies.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' In cases where a small amount of labeled mobile data was available, as in element detection and screen classification, incorporating web data led to better perfor- mance.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Even when labeled data was completely unavailable, as in screen similarity, models trained only on web data could often be directly applied to mobile app screens.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Our results suggest that the size of current UI datasets may be a limiting factor, since model performance increases consistently when trained on larger splits of data.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Our observations and analysis of WebUI’s composition showed that web pages can differ from mobile app screens in terms of complexity (i.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='e.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=', average number of on-screen elements) and element types.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' However, the performance improvements from our machine learning experiments suggest that web and mobile UIs are similar enough to transfer some types of semantics between them.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' We currently only explored three examples, although we believe that other UI modeling works [11, 47, 50] can also benefit from similar approaches.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' We did not evaluate all possible applications of WebUI in our paper, due to time and cost constraints.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' However, the three experiments we conducted cover all possibilities of source and target domain labels (1), so similar transfer learning techniques are likely to apply.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Future work that builds upon WebUI can conduct more detailed evaluations of other downstream tasks.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' One specific area that we believe is promising for future work is automated design verification [41], which could benefit from a large volume of web pages containing paired visual and stylistic information.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Our highly automated data collection process also allows WebUI to be more easily updated in the future by re-visiting CHI ’23, April 23–28, 2023, Hamburg, Germany Wu et al.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Droid Weight AntennaPod Time Tracker 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 Token GrowTracker GNUCash Figure 9: Examples of interaction videos segmented by our best models trained with UDA (Red) and without UDA (Blue).' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Videos are sampled at 1 fps.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' The output of both models contain errors, however, we found that the adapted UDA model generally produced better segmentations.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Common errors include oversegmentation due to app dialogs and soft keyboards, which do not occur in the WebUI dataset.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' the same list of URLs.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' An updated version of the dataset could also facilitate longitudinal analysis of the design [14] and accessibil- ity [21] of web UIs.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Nevertheless, WebUI is currently unlikely to support other types of modeling, such as user interaction mining [15, 16], that require realistic interaction traces, since our crawling strategy was largely based on random link traversal.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 5.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='2 Improved Automated Crawling Our crawler was unable to access much of the “deep web" (i.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='e.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=', large part of the web that cannot be indexed), and thus our dataset contains few, if any, web pages that are not publicly accessible or protected by authentication flows.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' It also did not attempt to interact with all elements on a web page and conducted a very limited exploration of any JavaScript-enabled functionality that might have been present.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Trends in web and app development, such as the creation of Progressive Web Apps (PWAs), suggest that this type of functionality will become more common, and traditional link-based traversal may become less effective at exploring UI states.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' To improve automated crawling and data collection, our crawler could benefit from a semantic understanding of web pages.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' For example, it could detect page functionality to explore states that require human input and either execute automated routines (e.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='g.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' detecting login fields) or employ crowdsourcing [15] to allow it to proceed in more complex scenarios.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Our currently trained models could augment or improve this process by identifying tasks associ- ated with web pages (e.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='g.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=', screen classification) or by augmenting potentially noisy labels provided by the automatically generated accessibility tree.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' In turn, the crawler could explore more of the web, leading to higher quality and more diverse data.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' If repeated iteratively, this process would constitute a form of Never-Ending Learning [39], a machine learning paradigm where models learn continuously over long periods of time.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Instead of learning from a fixed dataset, models could constantly improve itself by encounter- ing new content and designs, both of which are important due to the dynamic nature of UIs.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 5.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='3 Generalized UI Understanding Our experiments show that incorporating web data is most effec- tive for improving visual UI modeling in transfer learning settings where a limited amount of target labels are available for fine-tuning.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' A logical next step is to obtain similar benefits without any addi- tional labeled data.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' To this end, we identified several strategies for improving generalization.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' First, unlike existing UI datasets that contain examples from one device type, we intentionally simulated multiple viewports and devices during data collection.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' The decom- position of one-hot labels (where each element type is assigned <23> 31 《23> 23 A welan Gime Trac Add tas Add ta: Add tas Ad ask ① 7123:hange三 三 三 三 三 Fn,' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' May 15 Fr,' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' May 1 5 15:21 ay 20 0 7123WebUI: A Dataset for Enhancing Visual UI Understanding with Web Semantics CHI ’23,' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' April 23–28,' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 2023,' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Hamburg,' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Germany exactly one type) into combinations of multi-hot tags (each element can be assigned multiple labels) may also be useful,' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' since it avoids the problem of platform-specific element types.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Figure 6 demon- strates the zero-shot transfer capabilities of models trained only on web data by successfully detecting and classifying elements on Android app screens.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' While the label sets of web and Android data do not directly overlap, the web model outputs reasonable analogs (e.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='g.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=', Text, link) for Android widgets (e.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='g.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=', Text Button).' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Finally, our screen similarity model shows how unsupervised domain adaptation can improve the transferrability of learned features across domains through an explicit machine learning objective.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' A long-term goal of our automated data collection and modeling efforts is achieving a more generalized understanding of UIs — a single model that could be used to predict semantics for any UI.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' This is challenging due to differing design guidelines and paradigms, but it could ultimately lead to a better understanding of how to solve UI problems across platforms.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 6 CONCLUSION In this paper, we introduced WebUI, a dataset of approximately 400,000 web pages paired with visual, semantic, and style informa- tion to support visual UI modeling.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Unlike most existing datasets for UI research that depend on costly and time-consuming human ex- ploration and annotation, WebUI was collected with a web crawler that uses existing metadata, such as the accessibility tree and com- puted styles, as noisy labels for visual prediction.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Our highly auto- mated process allowed us to collect an order of magnitude more UIs than other publicly released datasets and often associates more information (e.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='g.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=', clickability, responsiveness) with each example.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' We demonstrated the utility of our dataset by incorporating it into three visual UI modeling tasks in the mobile domain: (i) element de- tection, (ii) screen classification, and (iii) screen similarity.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' In cases where a small amount of labeled mobile data exists, incorporating web data led to increased performance, and in cases without any labeled mobile data, we found that models trained on web pages could often generalize to mobile app screens.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' In summary, our work shows that the web constitutes a large source of data that can more sustainably be crawled and mined for supporting visual UI research and modeling.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' ACKNOWLEDGMENTS This work was funded in part by an NSF Graduate Research Fel- lowship.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' REFERENCES [1] 2022.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' AutoIt Function PixelSearch.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' https://www.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='autoitscript.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='com/autoit3/docs/ functions/PixelSearch.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='htm.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' [2] 2022.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Chrome DevTools engineering blog Full Accessibility Tree in Chrome DevTools.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' https://developer.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='chrome.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='com/blog/full-accessibility-tree/.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Accessed: 2022-09-15.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' [3] 2022.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Puppeteer - Chrome.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' https://developer.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='chrome.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='com/docs/puppeteer/.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Ac- cessed: 2022-09-15.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' [4] 2022.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' What is the ideal screen size for responsive design?' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' https://www.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' browserstack.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='com/guide/ideal-screen-sizes-for-responsive-design.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Accessed: 2022-09-15.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' [5] Carlos Bernal-Cárdenas, Nathan Cooper, Madeleine Havranek, Kevin Moran, Oscar Chaparro, Denys Poshyvanyk, and Andrian Marcus.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 2022.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Translating Video Recordings of Complex Mobile App UI Gestures Into Replayable Scenarios.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' IEEE Transactions on Software Engineering (2022).' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' [6] Sara Bunian, Kai Li, Chaima Jemmali, Casper Harteveld, Yun Fu, and Magy Seif Seif El-Nasr.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 2021.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Vins: Visual search for mobile user interface design.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' In Pro- ceedings of the 2021 CHI Conference on Human Factors in Computing Systems.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 1–14.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' [7] Andrea Burns, Deniz Arsan, Sanjna Agrawal, Ranjitha Kumar, Kate Saenko, and Bryan A Plummer.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 2022.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Interactive Mobile App Navigation with Uncertain or Under-specified Natural Language Commands.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' arXiv preprint arXiv:2202.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='02312 (2022).' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' [8] Tsung-Hsiang Chang, Tom Yeh, and Rob Miller.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 2011.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Associating the visual representation of user interfaces with their internal structures and metadata.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' In Proceedings of the 24th annual ACM symposium on User interface software and technology.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 245–256.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' [9] Olivier Chapelle, Bernhard Scholkopf, and Alexander Zien.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 2009.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Semi-supervised learning.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' IEEE Transactions on Neural Networks 20, 3 (2009), 542–542.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' [10] Nitesh V Chawla, Kevin W Bowyer, Lawrence O Hall, and W Philip Kegelmeyer.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 2002.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' SMOTE: synthetic minority over-sampling technique.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Journal of artificial intelligence research 16 (2002), 321–357.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' [11] Jieshan Chen, Amanda Swearngin, Jason Wu, Titus Barik, Jeffrey Nichols, and Xiaoyi Zhang.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 2022.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Towards Complete Icon Labeling in Mobile Applications.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' In CHI Conference on Human Factors in Computing Systems.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 1–14.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' [12] Jieshan Chen, Mulong Xie, Zhenchang Xing, Chunyang Chen, Xiwei Xu, Liming Zhu, and Guoqiang Li.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 2020.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Object detection for graphical user interface: Old fashioned or deep learning or a combination?' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='. In proceedings of the 28th ACM joint meeting on European Software Engineering Conference and Symposium on the Foundations of Software Engineering.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 1202–1214.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' [13] Nathan Cooper, Carlos Bernal-Cárdenas, Oscar Chaparro, Kevin Moran, and Denys Poshyvanyk.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 2021.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' It takes two to tango: Combining visual and textual information for detecting duplicate video-based bug reports.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' In 2021 IEEE/ACM 43rd International Conference on Software Engineering (ICSE).' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' IEEE, 957–969.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' [14] Biplab Deka, Bardia Doosti, Forrest Huang, Chad Franzen, Joshua Hibschman, Daniel Afergan, Yang Li, Ranjitha Kumar, Tao Dong, and Jeffrey Nichols.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 2021.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' An Early Rico Retrospective: Three Years of Uses for a Mobile App Dataset.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' In Artificial Intelligence for Human Computer Interaction: A Modern Approach.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Springer, 229–256.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' [15] Biplab Deka, Zifeng Huang, Chad Franzen, Joshua Hibschman, Daniel Afergan, Yang Li, Jeffrey Nichols, and Ranjitha Kumar.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 2017.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Rico: A mobile app dataset for building data-driven design applications.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' In Proceedings of the 30th Annual ACM Symposium on User Interface Software and Technology.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 845–854.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' [16] Biplab Deka, Zifeng Huang, and Ranjitha Kumar.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 2016.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' ERICA: Interaction mining mobile apps.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' In Proceedings of the 29th annual symposium on user interface software and technology.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 767–776.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' [17] Jia Deng, Wei Dong, Richard Socher, Li-Jia Li, Kai Li, and Li Fei-Fei.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 2009.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Imagenet: A large-scale hierarchical image database.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' In 2009 IEEE conference on computer vision and pattern recognition.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Ieee, 248–255.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' [18] Morgan Dixon and James Fogarty.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 2010.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Prefab: implementing advanced behav- iors using pixel-based reverse engineering of interface structure.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' In Proceedings of the SIGCHI Conference on Human Factors in Computing Systems.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 1525–1534.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' [19] Samuel Dodge and Lina Karam.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 2016.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Understanding how image quality af- fects deep neural networks.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' In 2016 eighth international conference on quality of multimedia experience (QoMEX).' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' IEEE, 1–6.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' [20] Shirin Feiz, Jason Wu, Xiaoyi Zhang, Amanda Swearngin, Titus Barik, and Jeffrey Nichols.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 2022.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Understanding Screen Relationships from Screenshots of Smart- phone Applications.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' In 27th International Conference on Intelligent User Interfaces.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 447–458.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' [21] Raymond Fok, Mingyuan Zhong, Anne Spencer Ross, James Fogarty, and Ja- cob O Wobbrock.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 2022.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' A Large-Scale Longitudinal Analysis of Missing Label Accessibility Failures in Android Apps.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' In CHI Conference on Human Factors in Computing Systems.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 1–16.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' [22] Yaroslav Ganin, Evgeniya Ustinova, Hana Ajakan, Pascal Germain, Hugo Larochelle, François Laviolette, Mario Marchand, and Victor Lempitsky.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 2016.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Domain-adversarial training of neural networks.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' The journal of machine learning research 17, 1 (2016), 2096–2030.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' [23] Leo Gao, Stella Biderman, Sid Black, Laurence Golding, Travis Hoppe, Charles Foster, Jason Phang, Horace He, Anish Thite, Noa Nabeshima, et al.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 2020.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' The pile: An 800gb dataset of diverse text for language modeling.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' arXiv preprint arXiv:2101.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='00027 (2020).' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' [24] Raia Hadsell, Sumit Chopra, and Yann LeCun.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 2006.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Dimensionality reduction by learning an invariant mapping.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' In 2006 IEEE Computer Society Conference on Computer Vision and Pattern Recognition (CVPR’06), Vol.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 2.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' IEEE, 1735–1742.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' [25] Kaiming He, Xiangyu Zhang, Shaoqing Ren, and Jian Sun.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 2016.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Deep residual learning for image recognition.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' In Proceedings of the IEEE conference on computer vision and pattern recognition.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 770–778.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' [26] Zecheng He, Srinivas Sunkara, Xiaoxue Zang, Ying Xu, Lijuan Liu, Nevan Wich- ers, Gabriel Schubiner, Ruby Lee, and Jindong Chen.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 2021.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Actionbert: Leveraging user actions for semantic understanding of user interfaces.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' In Proceedings of the AAAI Conference on Artificial Intelligence, Vol.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 35.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 5931–5938.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' [27] Gao Huang, Yu Sun, Zhuang Liu, Daniel Sedra, and Kilian Q Weinberger.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 2016.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Deep networks with stochastic depth.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' In European conference on computer vision.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' CHI ’23, April 23–28, 2023, Hamburg, Germany Wu et al.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Springer, 646–661.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' [28] Ranjitha Kumar, Arvind Satyanarayan, Cesar Torres, Maxine Lim, Salman Ahmad, Scott R Klemmer, and Jerry O Talton.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 2013.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Webzeitgeist: design mining the web.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' In Proceedings of the SIGCHI Conference on Human Factors in Computing Systems.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' ACM, New York, NY, USA, 3083–3092.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' [29] Luis A Leiva, Asutosh Hota, and Antti Oulasvirta.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 2020.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Enrico: A dataset for topic modeling of mobile UI designs.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' In 22nd International Conference on Human- Computer Interaction with Mobile Devices and Services.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 1–4.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' [30] Gang Li, Gilles Baechler, Manuel Tragut, and Yang Li.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 2022.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Learning to Denoise Raw Mobile UI Layouts for Improving Datasets at Scale.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' In CHI Conference on Human Factors in Computing Systems.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 1–13.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' [31] Toby Jia-Jun Li, Amos Azaria, and Brad A Myers.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 2017.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' SUGILITE: creating multimodal smartphone automation by demonstration.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' In Proceedings of the 2017 CHI conference on human factors in computing systems.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 6038–6049.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' [32] Yang Li, Jiacong He, Xin Zhou, Yuan Zhang, and Jason Baldridge.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 2020.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Mapping natural language instructions to mobile UI action sequences.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' arXiv preprint arXiv:2005.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='03776 (2020).' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' [33] Yuanchun Li, Ziyue Yang, Yao Guo, and Xiangqun Chen.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 2017.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Droidbot: a lightweight ui-guided test input generator for android.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' In 2017 IEEE/ACM 39th International Conference on Software Engineering Companion (ICSE-C).' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' IEEE, 23– 26.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' [34] Yuanchun Li, Ziyue Yang, Yao Guo, and Xiangqun Chen.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 2019.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Humanoid: A deep learning-based approach to automated black-box android app testing.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' In 2019 34th IEEE/ACM International Conference on Automated Software Engineering (ASE).' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' IEEE, 1070–1073.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' [35] Paul Pu Liang, Yiwei Lyu, Xiang Fan, Zetian Wu, Yun Cheng, Jason Wu, Leslie Chen, Peter Wu, Michelle A Lee, Yuke Zhu, et al.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 2021.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Multibench: Mul- tiscale benchmarks for multimodal representation learning.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' arXiv preprint arXiv:2107.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='07502 (2021).' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' [36] Tsung-Yi Lin, Michael Maire, Serge Belongie, James Hays, Pietro Perona, Deva Ramanan, Piotr Dollár, and C Lawrence Zitnick.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 2014.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Microsoft coco: Common objects in context.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' In European conference on computer vision.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Springer, 740–755.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' [37] Thomas F Liu, Mark Craft, Jason Situ, Ersin Yumer, Radomir Mech, and Ranjitha Kumar.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 2018.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Learning design semantics for mobile apps.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' In Proceedings of the 31st Annual ACM Symposium on User Interface Software and Technology.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 569–579.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' [38] Wei Liu, Dragomir Anguelov, Dumitru Erhan, Christian Szegedy, Scott Reed, Cheng-Yang Fu, and Alexander C Berg.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 2016.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Ssd: Single shot multibox detector.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' In European conference on computer vision.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Springer, 21–37.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' [39] Tom Mitchell, William Cohen, Estevam Hruschka, Partha Talukdar, Bishan Yang, Justin Betteridge, Andrew Carlson, Bhavana Dalvi, Matt Gardner, Bryan Kisiel, et al.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 2018.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Never-ending learning.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Commun.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' ACM 61, 5 (2018), 103–115.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' [40] Kevin Moran, Carlos Bernal-Cárdenas, Michael Curcio, Richard Bonett, and Denys Poshyvanyk.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 2018.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Machine learning-based prototyping of graphical user interfaces for mobile apps.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' IEEE Transactions on Software Engineering 46, 2 (2018), 196–221.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' [41] Kevin Moran, Boyang Li, Carlos Bernal-Cárdenas, Dan Jelf, and Denys Poshy- vanyk.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 2018.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Automated reporting of GUI design violations for mobile apps.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' In Proceedings of the 40th International Conference on Software Engineering.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 165–175.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' [42] Sinno Jialin Pan and Qiang Yang.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 2009.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' A survey on transfer learning.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' IEEE Transactions on knowledge and data engineering 22, 10 (2009), 1345–1359.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' [43] Richard S.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Schwerdtfeger.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 1991.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Making the GUI Talk.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' ftp://service.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='boulder.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='ibm.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' com/sns/sr-os2/sr2doc/guitalk.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='txt.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' [44] Vinoth Pandian Sermuga Pandian, Sarah Suleri, and Matthias Jarke.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 2021.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Synz: Enhanced synthetic dataset for training ui element detectors.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' In 26th International Conference on Intelligent User Interfaces-Companion.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 67–69.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' [45] Karen Simonyan and Andrew Zisserman.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 2014.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Very deep convolutional networks for large-scale image recognition.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' arXiv preprint arXiv:1409.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='1556 (2014).' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' [46] Nitish Srivastava, Geoffrey Hinton, Alex Krizhevsky, Ilya Sutskever, and Ruslan Salakhutdinov.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 2014.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Dropout: a simple way to prevent neural networks from overfitting.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' The journal of machine learning research 15, 1 (2014), 1929–1958.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' [47] Amanda Swearngin and Yang Li.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 2019.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Modeling mobile interface tappability using crowdsourcing and deep learning.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' In Proceedings of the 2019 CHI Conference on Human Factors in Computing Systems.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 1–11.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' [48] Zhi Tian, Chunhua Shen, Hao Chen, and Tong He.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 2019.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Fcos: Fully convolutional one-stage object detection.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' In Proceedings of the IEEE/CVF international conference on computer vision.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 9627–9636.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' [49] Bryan Wang, Gang Li, Xin Zhou, Zhourong Chen, Tovi Grossman, and Yang Li.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 2021.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Screen2words: Automatic mobile UI summarization with multimodal learning.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' In The 34th Annual ACM Symposium on User Interface Software and Technology.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 498–510.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' [50] Jason Wu, Xiaoyi Zhang, Jeff Nichols, and Jeffrey P Bigham.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 2021.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Screen Parsing: Towards Reverse Engineering of UI Models from Screenshots.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' In The 34th Annual ACM Symposium on User Interface Software and Technology.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 470–483.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' [51] Mulong Xie, Sidong Feng, Zhenchang Xing, Jieshan Chen, and Chunyang Chen.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 2020.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' UIED: a hybrid tool for GUI element detection.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' In Proceedings of the 28th ACM Joint Meeting on European Software Engineering Conference and Symposium on the Foundations of Software Engineering.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 1655–1659.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' [52] Qizhe Xie, Minh-Thang Luong, Eduard Hovy, and Quoc V Le.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 2020.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Self- training with noisy student improves imagenet classification.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' In Proceedings of the IEEE/CVF conference on computer vision and pattern recognition.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 10687–10698.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' [53] I Zeki Yalniz, Hervé Jégou, Kan Chen, Manohar Paluri, and Dhruv Mahajan.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 2019.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Billion-scale semi-supervised learning for image classification.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' arXiv preprint arXiv:1905.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='00546 (2019).' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' [54] Tom Yeh, Tsung-Hsiang Chang, and Robert C Miller.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 2009.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Sikuli: using GUI screenshots for search and automation.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' In Proceedings of the 22nd annual ACM symposium on User interface software and technology.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 183–192.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' [55] Xiaoyi Zhang, Lilian de Greef, Amanda Swearngin, Samuel White, Kyle Murray, Lisa Yu, Qi Shan, Jeffrey Nichols, Jason Wu, Chris Fleizach, et al.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 2021.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Screen recognition: Creating accessibility metadata for mobile applications from pixels.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' In Proceedings of the 2021 CHI Conference on Human Factors in Computing Systems.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 1–15.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' A ADDITIONAL DATASET SAMPLES We provide additional samples from the WebUI (Figure 10) to sup- plement the example in the paper (Figure 2).' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Our example gallery shows several different types of websites, including login, landing, product, portfolio, and informational pages.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Each website is cap- tured using different simulated devices, which shows, among other things, how content responds to screen size.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' We also computed the percentile-rank of each web page’s class distribution.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' B CLASS IMBALANCE ANALYSIS This section describes analysis of class imbalance of WebUI and its effect on transfer learning applications.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Similar to other UI datasets[55], WebUI exhibits an imbalance of UI element classes, where some types of elements (e.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='g.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=', text) appear much more fre- quently than others (e.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='g.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=', images).' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Several aspects of WebUI (e.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='g.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=', finer-grain text segmentation, multi-hot labels, and prevalence of documents on the web) also contributed to class imbalance.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' We used a frequency-based resampling method to generate the Web7k-Resampled, which resulted in more examples of infrequent element types.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Our technique assigned weights to samples to in- crease the representation of UIs containing rare or infrequent ele- ment types, and we resampled based on the 10 element types shown in Figure 3.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Algorithm 1 provides an overview of our resampling technique.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Note that unlike some class-balancing algorithms (e.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='g.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=', SMOTE [10]), our technique does not generate additional synthetic samples and does not include the same screen more than once.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Web7k-Resampled contains proportionally more examples of many infrequent classes (Figure 3).' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Figure 11 shows the proportional increase in screens containing each element type.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Figure 12 shows the proportional increase in the total number of elements for each type.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' The results from our performance evaluations in the main paper suggest that this resampled split leads to improvements for each of our three tasks when compared to a randomly sampled subset of the same size.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Notably, the element detector model resampled 7k split outperformed the one trained on 70k random split, which suggests that element balancing was particularly useful for tasks where elements types are directly predicted.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Tests with other two tasks (screen classification and screen similarity) also led to im- provements for the resampled models;' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' however, the gains were more modest.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' The improvements could be because the element distribution in the resampled split is closer to that of the target data.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' In addition, we provide a deeper analysis of the Element Detection WebUI: A Dataset for Enhancing Visual UI Understanding with Web Semantics CHI ’23, April 23–28, 2023, Hamburg, Germany Figure 10: Samples from WebUI accessed with different simulated devices.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' For each screen, we compute its element type dis- tribution (normalized to 1).' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Then, we computed the percentile-rank of the top 10 classes with respect to the entire dataset.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' For example, the bottom row’s button class has a percentile-rank of 90, meaning the web page’s relative frequency of is greater than 90% of others in the dataset.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' TheOpenTracingpeject isarchivedLearmore MioratetrQpenTelemietry.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='today U OPENTRACING DOSSGUIDS PDOUEETGETMIVAIVEOCITHEEBLOG REGST SAYHIOWGUTER bash VendarnautralAesand instrumiemtationfofcistnbutedtracing Librariesavailableinglanguages So ZivScrintJaa.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Pyahon RuhyPliObiectiwcrCHiC The latestfromourblog Openi Tracing hasbeen Archived toove DADPWEARD MOTANSASA All advertisingisfirstcome,first choiceof placement, and mustbe paid in advance via PayPal.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' jpg or aifTormatprefered.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Adaltional single-pageads arehalf-price.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Websitesizesandpricesbelow Starneadvertisingratesand sizesare,perquarterlyissue $75/fullpage(5"wx8*h) $40/half-page (5"w x3.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='8"h): $15/business-card size (2*x 3.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='5 eitherway!' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Banneradvertisingis availableatthebottomofevery pageonthesite,justabovethe copyrightandadvertising links CustomsizescanDeaanqed Leff sidebaradvertising is availableonanypagewitha menu sidebar.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Leftsidebarads are square orrectangular,120 pixels wide.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Riaht sidebaradvertisingis availableonlyonselectedpaqes see ndetpageasanexample)text link listitem image heading paragraph linebreak generic gridcell button 0 20 40 60 80 100NAYA To.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='search.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' :typeandhitentef.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' D FAMILY CENTER ABOUT SERVICES SUPPORTUS POLICY-ADVOCACY EVENTSBLOG SUPPORTUS WAYSTOGIVE DONATENOW Your qenerous donation limpacts the lives of 10.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='o00peopleeach yearwith wraparound services thathelpour WAYSTOGIVE community thrive.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' LEADERSHIPGIVING Todonateonline,clickhereNAYA FAMILY Tosearch.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='typeandhitenter.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' CENTER ABOUT SERVICES SUPPORTUS POLICY-ADVOCACY EVENTS BLOG SUPPORTUS WAYSTOGIVE JONATENOW Yourgenerousdonationi impactsthe livesof10.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='0o0peopleeachyearwithwraporoundservicesthathelpour WAYSTOGIVE comumunitythrve LEADERSHIPGIVING Todonateonline,clickhete, GIRCLE MAKEADONATIONBY CHECK OLR PARTNERS AND MakeyourCheckpayableto:NAYAFamilyCenterNAYA o sealcLnype anct hitereetC FAMILY CENTER ABOUTSERVICES SUPPORTUS POLICY-ADVOCACYEVENTSBLOG SUPPORTUS WAYSTOGIVE DONATENGW WAYSTOGIVE community tithe LEADERSHIPGIVING To donateonline,cick here.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' CIRCLE MAKEASONATIONBYCHECK DURPARTNERS AND MakeyourcheckpayabletoNAYAFamilyCenter SUPPUHTERS Sendato: REIURNTO.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' NAYA Famlly Certer NEERGHDKIKOO 5135-NE Columbia Blvd Portand,' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='OR97218NAYA FAMILY CENTER ABOUTSERVICESSUPPORTUSPOLICY-ADVOCACYEVENTSBLOG SUPPORTUS WAYSTOGIVE DONATENOA committythit wourgsemusdonatoninpcts WAYSTOGME LEADERSEIPOIVING Be danateonint,' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='chice bere MAKEABONATIONBYCHECK CUREPARINERSANU SUPFOHTEHS Senditto: RETEFNTO Sn Nt Cohrta aid WoAPanilyCeter NIRCHIKIKOD) Partirid,' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='O81 97218 Atm Develagnsert MONTHLYGIYING catamuficbankwithdrawalatbycharpingyoureredtcare werecesessunmaryufyeuramislgiisglustanieaprarinjpunuaigmjtasugendarstspyurrecierg sfng.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='NyFamlyCenbertheoppertunityto.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='tolanceogrmourosthrooghoutheyewr.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Fertaxerproetyou mattuia natDeutlomentDataSaeclwlidNAYA CENTER SAMILY TSEEEEE SUPPORTOS WAYSTOGIVE NSUOM MANAYA FAMILY CENTER ABOUT SERVICESSUPPORTUSPOLICY-ADVOCACY EVENTSBLOG SUPPORTUS WAYSTOGIVE TDNATEIIOW hurgenerousdoatignimgectsthelivesgrLe,ooopeeoleeodyearwitharapartunfservcesfathelpaur WAYSTOGIVE TADERSHIP GIVING To.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='donat RAL MAKEADONATIONEYCHECK OURPARINERSAND Makeyoorcheckgeyableto:NAVAFamiy Certe SUFPOBTERS Send ito RETURNTO wYATantyCentm NERCHOKIKOO 5135MEColusbiaBive Poitlid,ORST238 AllrtDevelepment MONTHEYGIMNG obetyou gitstortinewthephoetallerenailtoLisaHarmn.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='DeveloonerttetaSpecislisttaitonavoatusroo 5032as.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='a177x295.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='To.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='males1ecaminggit.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='cicthm BIETSONHONORORMEMORY Rementeralnvedonewthiagitmadenthesrhonerer.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='men noryGftsareackoowedjed througtscantsest mtherecipientotyourchoice.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='lomalsaDibutegit,tftrhat STOCINS Aconistionofshoresheldformorettanayesriseigibletertesaneincame.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='taxdeductionasgttsotcas, shosld yoeitemiae,Alto,ithedoratod.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='sharesincreasedinvolue.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Feuavoidmecaptaqainstaxmatweulidbe uwdstunpausstieeshaess Ce Torecelveareceiptandtoensyrethstyourtinsterisprocessedcotretty.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='pleaeletinxneefyeurstoctgint infonatisn Datoenitext link listitem image heading paragraph linebreak generic gridcell button 0 20 40 60 80 100The OpeiTracingprojectis-archivedLearn more Migrale to OpenTelenetry todayl COPENTRACING DOGUT电NVCVRY SAYHIGNITTER bash enwormewtraApisano instrurmentatiomtordistnoutecftragimg Librariesavallableinganguages SoaveScritt.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='lavePahon/Buby PHeDbimttiw.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='fteCn The:Iotestfrom.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='ourtatog Cieniracing has bren Archived DHDWUARD HOTAN SHPROSETheOpenTracngpmjeclisrarchived.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Lcanman oletoDpeniFemetiytodoy!' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' D OPENTRACING OCEIFHOEGGETMVCOGITHEMSPTR SAYHIONGITTEE bash wemomrneuttatAPisand instrumenmlionfonalisintateotracine Libranesavailableinglangueges The:iatestfrorrourblog: gpen TracinghasbeenArchued ORETeOpenTracrg-projpoctit-anchiwoLalmincl DOPENTRACING bash VerworFerals=au tuineaonfonoaindedfahm LbranesavalableinSlangueges o WhyOoenTracino.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='infinite Warlds Store InfiniteHorrorsSubmissions.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content="/FAQs PodcastLogin WELCOMETOTHEFUTURE Infinife Worlds Magazine is everythingyou've always wanted in a science fiction-magazine." metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content="Featuring a beautiful,high-quality print, pageafter page of sensational art, incredible comics,andmind- bending storiesfrom falents the world over, as wellas fascinating interviews with noteworthyfigures, Infinite Worlds is the magazine you've been waiting for allthese years, Full-color." metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Ad-free.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Limited-edition.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Madebyfans,forfans.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='StoreInfiniteHarrors Submissions.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content="/FAQs PodcastLogin WELCOMETOTHEFUTURE Infinite Worlds Magazine is everything you've always wanted in a science fiction magazine." metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content="Featuring a beautiful,high-quality print, pageafterpageofsensational art,incredible comics, andmind bending stories from talents the world over,as well as fascinating interviews with nofeworthyfigures, Infinite Worlds is themagazine you'vebeen waitingforall theseyears, Full-color." metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Ad-free.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Limited-edition.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Madebyfans,forfans.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=".bocoup SERVICES RECEMIWORK BLOG ABOUTUS Search, Q CONTACTUS WESTANDINSOLIDARITYWITHTHEMOVEMENTFORBLACKLIVES NCLUSIVETECHNOLOGY CONSULTINGinfinitel warlds StoreInfinite Horrors Submissions/FAGs Podcas Login WELCOMETOTHEFUTURE Infinite Worlds Magazine is everything you've always wanted in ascience fiction magazine." metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content="Featuring abeautiful,high-qualityprint,page after page of sensationalart, incredible comics,and mind-bending storiesfrom talents the world ov,as wellsfascinafinginriews withnoworthyfigures, Infinit Worlds isthe magazine you've been waing for all hese years." metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Full-color.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Ad-free,Limited-edition, Madebyfans, forfans.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='infinite warlds StorehhiniteHonois 5ubmissions /:FAGs Fodcat.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=" togin7 WELCOMETOTHEFUTURE Infinie WorfdsMogazinelseveryhingyou'vealweys worted insciencefictionmagazine." metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Comick,andmind-bendinastoriesfrom lechfheworidover,oiwelanfoscinain all beve yson.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Full-color.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Ad-free.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Limited-edition.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=" Mode by hons lor fonDREAMFORGE CONNECTINGDREAMERSPASTANDFUTURE Fiction Nonmction Regustor Login StoryDetails Russ's Rockin Rollercoaster RussColchamiro." metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content="com ROLLERCOASTE SS0 DCKI ulor RUSS'S ROCKIO ROLLERCOASTER Check out Ruts's Rockir Rolleronaster for livepodcastswith SSFAuthars." metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Chockthelatest schediuleatrusiscolchamro.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='com LMEPOOCASTS DYNAAICSEAVTHORS CHECK OUTTHE SPRING/SHMMER20ZI SCHEDULEGT russcolchamito.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content="conmDREAMFORGE CONNECTINGDEAMERS-RAST AND FUTURE VinitWebsite Issues Fiction Nonfiction Rogstor StoryDetails Russ's Rockin'Rollercoaster RussColchamiro." metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content="com OLLE Join RUSS'SROCKIn BOLLERCOASTER Check out Russs Rookin'Rolerccasle for livepodcastswith:SSF Authors." metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' iusstolctiamro.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content="com LAVEPODCASTS DYNANICSFFAMTHORS CHECKOUTTHE SPRING/SUMNER20Z SCHEDMLE OT soonsorerDREAMFRGE CONNECEHIG DKEAMERS=PABTAND PUTURE fratWecs FiEson Nonfetion Rogute Loan Story Details Russ's Rockin'Rollercoasterl RussColchamiro." metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content="com RUSS'SROCKIO BOLLERCDASTEE JOIn RUSSSROCKIN ROLLERCOASTER schedideatruticoiciuamrotoo LNEPODCASTS WDYNAMICSFEAUTHORS CHECK OUT THE SPRING/SUMMER2021 SCHEDULE aT russcoichomiro." metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content="com Sponsored by ourfriends ot DREAmFORGEDREAMFORGE Story Detailh Rust's Rockin'Rollercoterf RussColchamiro." metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='com USS ulor RUSSSF ROCKIO ROLLERCOASTER LVEPODCASTS WDYNAMICSFEAUTHORS CHECKOUTTHE SPRING/SMAMER2OZI SCHEDuLEaT russcolchamiro.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='cem ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='SponsoredbyourtriendsatDRe ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='FORGELEER ENING ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Constelacion ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='UNAREVISTAJIUNGOEDERICCIONESPECUL ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Bmondos ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Leel ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Convocatonas ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Apoyo ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Aceroa de ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Contacto ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Constelacion ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='LEER ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='EDICION#2GLEERENINGLES ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Constelacion ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='NAREVISTABLNGUEDERCCONESPECULAIN ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Leer ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Corwocitorios ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Apoyo ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Acerco de "' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Contacto ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Constelacion ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='LEER ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='EDICION#2LEERENINGLES ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Constelacion ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='UNABEVABLNGOEDERCCIONESHCIL ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Apoyo ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Acerodd ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Contacto ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Constelacion ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='LEER ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='EDICION#2LEERENINGLES ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Constelacion ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Constelacion ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='LEER ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='EDICION#2MOMENT ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='STARTSEITE ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='MUSIKANLAGEMIETEN ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='PLAYLIST-MANAGER ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='ZUBEHOR ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content="SOFUNKTIONIERT'S " metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content="Hier gibt's was auf die " metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Ohren!' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Wir haben den richtigen Sound und daspassende Equipment furdein Event Die Moment-juke bietet such professionellen Klang.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Mit unserem Playlist- Manager.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content="treftthrdenGeschmackalleuererGaste undeure Partywird ein Erfolg MusikaniagemietenMOMENT STARTSEITE MUSIKANLAGEMIETEN PLAYLIST-MANAGER ZUBEHOR SOFUNKTIONIERT'S uke Hier gibt's was auf die Ohren!" metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Wir haben den,richtigen Sound und das passende Equipmentfurdein Event,Die Moment-Juke bietet euch professionellenKlang.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Mit unserem Playlist- Manager.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='trefft ihrden GeschmackalleuererGaste und eurePartywird ein Erfolg Musikanlagemietenbocoup SERVICES RECENTWORK BLOG ABOUTUS.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content="- Search Q CONTACT US WESTANDINSOLIDARITYWITHTHEMOVEMENTFORBLACKLIVES INCLUSIVETECHNOLOGY CONSULTINGMOMENT STARTSETTE MUSIKANLAGEMIETEN PLAYLIST-MANAGER ZUBEHOR SOFUNKTIONIERY'S eue Hier gibt's wasauf die Ohren!" metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Wir haben den.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='nichtigen Soundund das passende Equipmentfur dein Event Die Moment-juke bietet euch professionellen Kang, Mit: unserem Playlist- Manager.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='treftihrden Geschmackal suerer.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content="Giste und eurePartywird ein ErfolgMOMENT STARESEITE PLAYLIST-MANAGEH Hier gibt's was auf die Ohren!" metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Wir naben den.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='nchigen.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Sound und da passande EeulmentordenEventDiuMomantjunbisbst Manaper.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='truftrdenCeschmecallsuereGate euthgrofessinneilen tiangMiturstrem Playlist und-eurs Pity wind-in E/fsig Terminaussuchen Moment-Juke Abfeiernbocoup DONTACTUS Search Q SERVICES RECENTWORK BLOG ABOUTUS- OURTEAM MISSION & VALUES CAREERS WESTANDINSOLIDARITYWITHTHE MOVEMENTFORBLACKLIVES INCLUSIVE TECHNOLOGY CONSULTINGMioratetnQpunTdomiatrytodayl COPENTRACING 三 VenuorneuiralAPisamd mistiumentatonifordistributed traung Librares available in.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='gtanguages Thelaitestfromoorblog CaenTraring hasbaen.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Avchivod ZIPKINSUOTSTER AEE VNVISN mAOIN GRPCH QGokit obuelpWELCOMETOTHE FUTURE.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=" InfiniteWorlds Magazineis everythingyou've always." metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='wanted in a science fiction mogazine.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=" Featuring abeautiful, high-qualityprint,page afterpageof sensationalart, incredible comics, and mind-bending stories from talents the world over,as well asfascinafing interviewswith noteworthyfigures, Infinite Worlds isthemagazineyou'vebeen waiting forall theseyears." metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Full-color.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Ad-free.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Limited-edition.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Madebyfans,forfans.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='DREAMFORGE 三 Story Details RUSS OCKLO LERCOASTER PODCAST WDYMAMGSIFALTROES CHECKOUTTHE SPRING/SMAMERZOZT SCHEDMLEO tssco/chatiro.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content="cam Sponsored by our friends ot OREAmFBRGE Check out RusssRockin' Rollercoasterfor livepodcasts with SSF Authors:Check." metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='thelatest schedule.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='at russoolchamiro.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content="com Russ's Rockin Rollercoasterl RussColchamiro." metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content="comConstelacion UNA REVISTABILUNGDEDEFICCIONESPECULATIVA LEER EN INGLES LEER EDICION#2 ConstelacionLogin 三 Hier gibt's wasauf die Ohren!" metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Wir haben den richtigen Sound und das passende Equipment furdein Event.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Die Moment-jukebieteteuch professionellenKlangMitunserem Playlist-Manager,trefftihrden Geschmackall euerer Gasteund eure Party wird ein Erfolg.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Musikanliagemieten Hochzeiten RedenSSimplenote 三 Thesimplestway tokeepnotes All your notes,synced on all your devices.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Get Simplenote now foriOS, Android,Mac,Windows,Linux,orinyour browser.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Signupnow Comprehensive underneath,simpleon thesurfacebocoup SERVOS RECENTWORK ELDE A80uT05-Seercb OINAIS WESTAND INSOLIDARITYWITHTHEMOVEMENTFORBLACKLIVES INCLUSIVE TECHNOLOGY CONSULTING WEPARTNERWITHTECH Weateateam of productand standards developers COMPANIESAND NONPROHIS workinewiihpartnersonenglneering.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='design,and otganizational strategy prujects that align with our TO INCREASEACCESSIBILITY missioniandvalaes.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Wearecommittedtomaking INCLUSION AND JUSTICEON techspacessaferformarginalizedpeopleinour ANDTHROUGHTHEWEB peojectsand on ourteam.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Read theCode ofChnduct which.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='govetns.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='ourwork Recent Projects Our recent projects includeengineering for Scratch (the beginner-frieadly programminglanguage),web standards for assistive technologies lke screen-readers,and evicton detense tools for tenaats In Los Angeles CountyTheOpenTracing.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='projectisarchwod Lswnimors innea Qanplemety today UOPENTRACING m VOVTRI tash enonheutralAmsand louneialicmarcismbutes Libranes:availabiein.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='9Jarguages Tnesaiesthomourblog TPKINeO WhyOpenTracing?' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' LEARNABOUTOUR DESIGNPRINCIPLESinfinite warlds WELCOMETOTHE FUTURE.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' ercefiction.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='mogazine and wellasfascinating imeriewswithnoteworth waitingforlltheseyears Full-color.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Ad-froo,Limited-edition Made-byfans, foefun$bocoup SERVICES BLOG ABOUTUS Search a 皖COMACT1S WESTANDINSOLIDARITYWITHTHEMOVEMENTFORBLACKLIVES INCLUSIVETECHNOLOGY CONSULTING WEPARTNERWITHTECHCOMPANIES We are a team of product and standards developers working with partners on engineering, design, andorganizational ANDNONPROFITSTOINCREASE strategy projects that align with our mission andvalues,WeareDREAMEORGE Story Detail.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=" RUSSS ROCKID ROLLERCOASTER LME-POCAST SPRING/SUMAERZ92 CREDVLEad Rus'sRockin'Rollercoasterl RussColchamiro." metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='com RUSSSROCKIO ROLLERCOASTER LVEPODCASTS DYNANIC SIT AUTHORS CHECKOVTTHE SPRING/SUAMER202I SCHEDULEQT SponsoredbyourfriendsotDREAmFORGEConstelaci Convocototios Apoy Apeicode Centocto Constelacion LEER EDICION#2 iSuscribeteanuestroboletin de noticias!' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content="MOMENT llee Hier gibt's was auf die Ohren!" metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Wir.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content="Kaben-Ben-richtigen Sound usid das passende Equiprnent fir-dein Event Din Momunt-juke hieteteuch'prodessiorilun Klang:Mitunserem,Piayligt." metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Manager,trem inr den Gutchrack al euenet Gaste und Djepg u pen Xueg ina Mueikanlage mieten Termin Moment-Juke Abfeiern aussuchen mieten JuklekennenleirnenSSimplenote CatictutHeeBngLeghSipup Thesimplestway tokeepnotes All your notes,synced on all your devices.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Get Simplenote now for iQS, Android, Mac, Windows;' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=" Linux,/or In your browser, BretVictor'sQuot Comprehensive underneath,simpleon the surfaceSimplenote Log In." metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Sign up Thesimplestway tokeepnotes All your notes,synced on all yourdevices,Get Simplenote nowfor iOS, Android, Mac, Windows, Linux, or in your browser.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Moudnubis eehmn.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='simplecote.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content="com All Mobes BretVictor'sQuoteCollectionSSimplenote ContactUsHeipBlog Log in 5ign up The simplest way to keep notes All yournotes,syncedonallyourdevices." metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='GetSimplenotenowfor jos,Android,Mac, Windows, Linux,or.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='in yourbrowser.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Mou dn unis utossimplienotn.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content="com BretVictor'sQuoteCollection AlanMoote:interviewontmtycomsimplenote Login 5ignUp Thesimplestway to keep notes All your notes, synced on all your devices." metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Get Simplenote nowfor.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' iOS, Android, Mac, Windows, Linux, or in your browser, tpi://simplenote.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content="com B BretVictor'sQuoteCollection Alan Moore:interview." metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='on nitv.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='com Ihave a theory, which hasnotletme down sofar, that there is an inv between imagination and money.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Because the more money.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='and techSSimplenote Centait Us teig Big Lagim Thesimplestway tokeep notes All your notes, synced on.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='allyour devices.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content="Get Simplanote'now for O5;" metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Android;' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Mac,Windows, Linux, orin.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='your.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='browser.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 0etitpsu/timplenote.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content="com All Note BretVictor'sQuoteCollection Alan Moore: interview on mtv." metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='com Tme Ihave.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='a theory, which has not.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='let me:down so far, that there is sn iw ieis between irnagination and money.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Because the more money and tech available to [create]a work,the less imagination there wil be in it.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' http/www.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='mty.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='com/shured/movies/interview/m/moore_alun.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='CGo3ttext ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='link ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='listitem ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='image ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='heading ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='paragraph ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='linebreak ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='generic ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='gridcell ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='button ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='0 ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='20 ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='40 ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='60 ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='80 ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='100text ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='link ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='listitem ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='image ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='heading ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='paragraph ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='linebreak ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='generic ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='gridcell ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='button ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='0 ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='20 ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='40 ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='60 ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='80 ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='100text ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='link ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='listitem ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='image ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='heading ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='paragraph ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='linebreak ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='generic ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='gridcell ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='button ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='0 ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='20 ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='40 ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='60 ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='80 ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='100text ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='link ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='listitem ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='image ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='heading ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='paragraph ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='linebreak ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='generic ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='button ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='0 ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='20 ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='40 ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='60 ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='80 ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='100text ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='link ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='listitem ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='image ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='heading ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='paragraph ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='linebreak ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='generic ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='gridcell ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='button ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='0 ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='20 ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='40 ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='60 ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='80 ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='100text ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='link ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='listitem ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='image ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='heading ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='paragraph ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='linebreak ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='generic ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='gridcell ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='button ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='0 ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='20 ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='40 ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='60 ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='80 ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='100$bocoup ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='SERVIES ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='XEONDUDE ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='BLOB ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='ABOUTUS.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='+ Search a WESTANDIN INCLUSIVETECHNOLOGY CONSULTING WEPARTNERWITHTECHCOMPANIES We are ateam of product and standards developers.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='woridng with pirtners on engineering,design,and organizational ANDNONPROFITSTOINCREASE stateyejectsthataligthoumisstonndaluesWea ACCESSIBILITY.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='INCLUSION.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='AND commitedtomalingtecspacessafefrmaginalizedpeople JUSTICEONANDTHROUGHTHEWEB inourprojects-anidon our tean.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Read theCodeot Contha which governs out worktext link listitem image heading paragraph linebreak generic gridcell button 0 20 40 60 80 100ComicPress Home Blog Contact Us WiteFerUs ComiePress By Counsellr Lounch.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Updates A newer, Betfer.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Comic Theme Is Coming Soon!' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Gecriotihedontaunctht ErtallAdidress* Suorat Cainnot Wait?' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='ComicPress Hame Blog ContotrUs Wiae Fer Us ComicPressByCounsellr LiinchUpdotes Anewer,BetterComic ThemelsComing Soonl GntNothelfonfund EiallAddrtiss" Cannot Wait?' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' ToGetThe Clossic Version On WordPress.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='org Click HmeComicPress Heme CostactUs Wite For Us ComicPress By Counsell LanechUpdates Anewur,Buttur Comic Theme h Coming Soon!' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' GaNotfedanlaunn EmalAddres Sohmit Cannot Wait?' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' ToGetThe Clasuc Version OnWordPresorg ClidkHery yCoumsellWerdiirestSermemComicPress Han Cootist U, Wesfetlh ComicPress By Counsellr Liaanknilute Aoe;' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='BethrConeThenehCiigSoosl EmEAdT TeGetThrCaneVer Latest From The WordPress Blogbocoup ONTACTUS Search QComicPress 三Menu ComiePressBy Counsell Anewer,Better ComicTheme ls Coming Soonl Get Notified or aunch!' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Email Addlress* SubmitSbocoup WESTANDINSOLIDARITYWITHTHEMOVEMENTFORBLACKLIVESComicPress Have ig Cofaet Ui Wit Fer Us ComicPress ByCounsellr LounchUpdotai Arenir.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='BeterComeThemeComngSoe EmatATm TGThCletioOWardgtext link listitem image heading paragraph linebreak generic gridcell button 0 20 40 60 80 100Scienceficton.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='&fantasyPoetryAssociation Badvortisinaisfrtcometirstchoo rquarte Banner.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='advertisingis availableathebottom ofevery page on thesite.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' just above the warfSu 46ex.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='60-bonner:s150ryear.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='perpepe:$4n/year.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='site-wide 00250rectangl Fcod/rearste-wide are square or rectangular.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='120 pixeis widSciencefiction.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='&fantasuPoetryAssociatior gfformatpn size(2"x3.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='s"eitherway nsizescanbearange etn-e0bumer-stscyesrperpepe:s0/yesrste-aide aresquareor.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='rectanguiar,120pixelswide 12080 $75/yearserpape 120±90onxtontosL debat.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Left sidebat ad 120.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='a60 2120×240CHI ’23, April 23–28, 2023, Hamburg, Germany Wu et al.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Table 5: Average Precision (AP) of each element class (excluding the “Other" class) for the Element Detection task.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Element Type SSD (Random) FCOS (Random) FCOS (Web7k) FCOS (Web7k-Re.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=') FCOS (Web70k) FCOS (Web350k) Background Image 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='85 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='88 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='86 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='91 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='85 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='93 Checked View 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='06 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='28 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='31 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='34 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='32 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='38 Icon 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='72 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='73 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='75 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='75 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='75 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='77 Input Field 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='22 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='59 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='7 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='60 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='72 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='69 Image 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='73 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='8 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='77 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='82 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='78 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='82 Text 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='66 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='83 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='89 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='84 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='9 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='85 Text Button 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='57 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='9 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='94 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='94 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='95 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='94 Page Indicator 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='83 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='76 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='83 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='76 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='79 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='8 Pop-Up Window 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='85 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='83 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='8 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='85 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='78 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='83 Sliding Menu 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='95 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='98 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='96 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='98 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='96 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='97 Switch 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='97 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='93 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='86 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='97 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='91 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='94 mAP 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='67 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='77 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='79 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='80 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='79 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='81 Algorithm 1: Pseudo-code for the frequency-based resam- pling algorithm used to generate the Web7k-Resampled split.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' 1 function SampleSplit (𝑁,𝐶,𝑆);' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Input :Number of samples to choose 𝑁,' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' list of element classes 𝐶,' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' and list of samples 𝑆 ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='Output:Resampled subset of 𝑆 ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='/* Vector containing total frequencies for 𝑐 ∈ 𝐶 ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='/ ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='2 𝑓𝐶 ← total # of elements in 𝑆 for each class ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='/* Matrix where rows are 𝑠 ∈ 𝑆 and columns are ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='normalized frequency of 𝑐 ∈ 𝐶 for 𝑠 ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='/ ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='3 𝑓𝑆 ← frequency of classes 𝑐 ∈ 𝐶 (columns) for 𝑠 ∈ 𝑆 (rows) ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='/* Assign sampling weights to 𝑐 ∈ 𝐶 inversely ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='proportional to frequency ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='/ ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='4 𝑤𝐶 ← [ ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='1 ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='𝑓𝐶 [𝑐] | 𝑐 ∈ 𝐶] ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='5 samples ← [] ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='/* Repeat until desired split size is reached ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='/ ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='6 while len(samples) < 𝑁 do ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='7 ' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='𝑐𝑠 ← Sample(𝐶,' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='𝑤𝐶) 8 𝑤𝑠 ← [𝑓𝑆 [𝑠,' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='𝑐𝑠] | 𝑠 ∈ 𝑆] 9 sample ← SampleWithoutReplace(𝑆,' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='𝑤𝑠) 10 add sample to samples 11 end 12 return samples Relative Freq.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Change 0 1 2 3 text link list item image heading paragraph line break generic grid cell button Change in Screen Frequency after Resampling Figure 11: We calculated the change in frequency (expressed as a ratio) of screens containing at least one of each element type after resampling.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' For example, the number of screens containing at least one image element is 2.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='7x more than in the randomly sampled set.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Relative Freq.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Change 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='0 0.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='5 1.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='0 1.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='5 2.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='0 text link list item image heading paragraph line break generic grid cell button Change in Element Frequency after Resampling Figure 12: We calculated the change in frequency (expressed as a ratio) of total number of elements after resampling.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' For example, the average screen in the resampled split con- tains 1.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content='3x more images.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Note that is possible for most el- ement classes to increase in frequency (while not having other classes experience a proportional decrease) because el- ement classes are not mutually exclusive, and the resampled split contains more elements that are assigned multiple tags.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' class, which is most likely to be affected by element type imbal- ance.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}
+page_content=' Table 5 shows that the Web7k-resampled split has higher AP for classes like "Text Button" and "Image", which had increased representation after resampling.' metadata={'source': '/home/zjlab/wf/langchain-ChatGLM/knowledge_base/DtFQT4oBgHgl3EQfPzZf/content/2301.13280v1.pdf'}