diff --git a/.obsidian/plugins/obsidian-style-settings/data.json b/.obsidian/plugins/obsidian-style-settings/data.json
index 32f31076..11903aec 100755
--- a/.obsidian/plugins/obsidian-style-settings/data.json
+++ b/.obsidian/plugins/obsidian-style-settings/data.json
@@ -3,7 +3,7 @@
   "anuppuccin-theme-settings@@anuppuccin-accent-toggle": true,
   "anuppuccin-theme-settings-extended@@anp-theme-ext-light": true,
   "anuppuccin-theme-settings-extended@@anp-theme-ext-dark": true,
-  "anuppuccin-theme-settings-extended@@catppuccin-theme-extended": "ctp-gruvbox-light",
+  "anuppuccin-theme-settings-extended@@catppuccin-theme-extended": "ctp-notion-light",
   "anuppuccin-theme-settings-extended@@catppuccin-theme-dark-extended": "ctp-gruvbox-dark",
   "anuppuccin-theme-settings@@anuppuccin-theme-dark": "ctp-mocha",
   "anuppuccin-theme-settings@@anp-custom-checkboxes": true,
diff --git a/201 Metadata/My Library.bib b/201 Metadata/My Library.bib
index 52c15855..821eac82 100644
--- a/201 Metadata/My Library.bib
+++ b/201 Metadata/My Library.bib
@@ -10754,22 +10754,6 @@ Subject\_term: Careers, Politics, Policy},
 }
 
 @online{sohl-dicksteinDeepUnsupervisedLearning2015,
-  title = {Deep {{Unsupervised Learning}} Using {{Nonequilibrium Thermodynamics}}},
-  author = {Sohl-Dickstein, Jascha and Weiss, Eric A. and Maheswaranathan, Niru and Ganguli, Surya},
-  date = {2015-11-18},
-  eprint = {1503.03585},
-  eprinttype = {arXiv},
-  eprintclass = {cond-mat, q-bio, stat},
-  doi = {10.48550/arXiv.1503.03585},
-  url = {http://arxiv.org/abs/1503.03585},
-  urldate = {2024-07-16},
-  abstract = {A central problem in machine learning involves modeling complex data-sets using highly flexible families of probability distributions in which learning, sampling, inference, and evaluation are still analytically or computationally tractable. Here, we develop an approach that simultaneously achieves both flexibility and tractability. The essential idea, inspired by non-equilibrium statistical physics, is to systematically and slowly destroy structure in a data distribution through an iterative forward diffusion process. We then learn a reverse diffusion process that restores structure in data, yielding a highly flexible and tractable generative model of the data. This approach allows us to rapidly learn, sample from, and evaluate probabilities in deep generative models with thousands of layers or time steps, as well as to compute conditional and posterior probabilities under the learned model. We additionally release an open source reference implementation of the algorithm.},
-  pubstate = {prepublished},
-  keywords = {Computer Science - Machine Learning,Condensed Matter - Disordered Systems and Neural Networks,Quantitative Biology - Neurons and Cognition,Statistics - Machine Learning},
-  file = {/home/danesabo/Zotero/storage/M24N888A/Sohl-Dickstein et al. - 2015 - Deep Unsupervised Learning using Nonequilibrium Th.pdf;/home/danesabo/Zotero/storage/APVVMP7V/1503.html}
-}
-
-@online{sohl-dicksteinDeepUnsupervisedLearning2015a,
   title = {Deep {{Unsupervised Learning}} Using {{Nonequilibrium Thermodynamics}}},
   author = {Sohl-Dickstein, Jascha and Weiss, Eric A. and Maheswaranathan, Niru and Ganguli, Surya},
   date = {2015-11-18},