From 71447ab4a48d6d49b5c1d332f1122b61c680941d Mon Sep 17 00:00:00 2001
From: Matthias Veigel
Date: Sun, 18 May 2025 00:06:15 +0200
Subject: [PATCH] Added data extraction items.

---
 .vscode/settings.json |  3 +++
 main.typ              | 51 ++++++++++++++++++++++++++++++++++++++-----
 2 files changed, 49 insertions(+), 5 deletions(-)
 create mode 100644 .vscode/settings.json

diff --git a/.vscode/settings.json b/.vscode/settings.json
new file mode 100644
index 0000000..02ac0ce
--- /dev/null
+++ b/.vscode/settings.json
@@ -0,0 +1,3 @@
+{
+	"editor.wordWrap": "on"
+}
diff --git a/main.typ b/main.typ
index dd63b59..0461ca8 100644
--- a/main.typ
+++ b/main.typ
@@ -1,5 +1,5 @@
 //#import "@preview/clean-acmart:0.0.1": acmart, acmart-ccs, acmart-keywords, acmart-ref, to-string
-#import "clean-acmart.typ": acmart, acmart-ccs, acmart-keywords, acmart-ref, to-string
+#import "clean-acmart.typ": acmart
 #import "@preview/cetz:0.3.4"
 
 #let title = [Dataflow Analysis for Compiler Optimization]
@@ -16,7 +16,6 @@
 	title: title,
 	authors: authors,
 	copyright: none
-	//page: "A4"
 	// Set review to submission ID for the review process or to "none" for the final version.
 	// review: [\#001],
 )
@@ -39,7 +38,7 @@
 #set heading(numbering: "1.1.1")
 
 = Abstract
-// define DFA here or in introduction
+// define DFA and CO here or in introduction
 todo
 
 = Introduction
@@ -72,8 +71,11 @@ My search strategy consisted of 4 steps as seen in @sas_fig. \
 )
 
 The papers from the first steps are collected from the electronic databases ACM Digital Library, IEEE Xplore, Springer Link, Web of Science with the search string seen in @sas_search_string.
-The search string in @sas_search_string was created using the research questions in @research_questions_s and was always applied to the full text of the papers.
-
+The search string in @sas_search_string was created using the research questions in @research_questions_s and was always applied to the full text of the papers. \
+In the second step, all duplicates that were returned by multiple databases were removed from the results. \
+In the third step, the selection was filtered by applying all selection criteria from @selection_criteria_s. \
+In the fourth step, I snowballed the previously acquired results to find relevant papers that were not included because of either the search string or the selection criteria. \
+Afterwards, all papers were evaluated based on the data extraction items listed in @data_extraction_s.
 
 #place(
 	bottom + center,
@@ -121,6 +123,45 @@ The search string in @sas_search_string was created using the research questions
 	]
 )
 
+== Selection criteria
+#[
+	#set enum(numbering: (.., i) => "IC" + str(i))
+]
+#[
+	#set enum(numbering: (.., i) => "EC" + str(i))
+]
+
+== Data extraction
+#place(
+	bottom + center,
+	scope: "parent",
+	float: true,
+	[
+		#set par(leading: 0.3em)
+		#set text(size: 9pt)
+		#figure(
+			caption: [Data items],
+			supplement: "Table",
+			table(
+				columns: (1fr, 8fr, 2fr),
+				stroke: (x, y) => if y == 0 { (bottom: 0.7pt + black) },
+				align: left,
+				inset: (x: 6pt, y: 2pt),
+				[ID], [Data], [Purpose],
+				..(
+					([Author(s)], [Documentation]),
+					([Publication year], [Documentation]),
+					([Title], [Documentation]),
+					([Named advantage(s) of DFA for CO], [RQ1]),
+					([Named disadvantage(s) of DFA for CO], [RQ1]),
+					([Analyzed compilers], [RQ2]),
+					([In what way DFA is used], [RQ2])
+				).enumerate(start: 1).map(((i, arr)) => ([D#i], ..arr)).flatten()
+			)
+		)
+	]
+)
+
 #bibliography("refs.bib", title: "References", style: "association-for-computing-machinery")
 
 #colbreak(weak: true)
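
The scoped #set enum rules added under "Selection criteria" currently contain no list items. As a minimal sketch of how that numbering behaves once items exist (the item texts below are #lorem placeholders, not the author's actual criteria), each block labels its own items independently:

#[
	#set enum(numbering: (.., i) => "IC" + str(i))
	+ #lorem(6) // placeholder item, rendered with the label IC1
	+ #lorem(6) // placeholder item, rendered with the label IC2
]
#[
	#set enum(numbering: (.., i) => "EC" + str(i))
	+ #lorem(6) // placeholder item, rendered with the label EC1
]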
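
The Data items table derives its D1-D7 identifiers from the row order instead of hard-coding them. The following is a self-contained Typst sketch of the same idiom, reduced to two rows (an illustrative example, assuming the same Typst version as the patch, where array.enumerate accepts a start argument):

#table(
	columns: 3,
	[ID], [Data], [Purpose],
	// .enumerate(start: 1) pairs each row with a 1-based index i,
	// map prepends a [D#i] ID cell to that row, and flatten turns the
	// nested arrays back into the flat cell list expected by table().
	..(
		([Author(s)], [Documentation]),
		([Analyzed compilers], [RQ2]),
	).enumerate(start: 1).map(((i, arr)) => ([D#i], ..arr)).flatten()
	// resulting cells: [D1], [Author(s)], [Documentation], [D2], [Analyzed compilers], [RQ2]
)

Reordering or extending the data items therefore never requires renumbering the D-identifiers by hand.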