vault backup: 2025-04-01 13:01:58
parent e97bcb3b9b
commit d9510470cd
@@ -3,7 +3,7 @@ let s:so_save = &g:so | let s:siso_save = &g:siso | setg so=0 siso=0 | setl so=-
 let v:this_session=expand("<sfile>:p")
 silent only
 silent tabonly
-cd ~/Documents/Dane\'s\ Vault/9999\ Personal/Journal
+cd ~/Documents/Dane\'s\ Vault/5\ Thesis
 if expand('%') == '' && !&modified && line('$') <= 1 && getline(1) == ''
 let s:wipebuf = bufnr('%')
 endif
@@ -13,11 +13,13 @@ if &shortmess =~ 'A'
 else
 set shortmess=aoO
 endif
-badd +16 ~/Documents/Dane\'s\ Vault/5\ Thesis/4\ Meetings/DGC_DAS_328.md
+badd +16 4\ Meetings/DGC_DAS_328.md
+badd +32 5\ Resources/Literature_Reviews/A\ Review\ of\ Formal\ Methods\ applied\ to\ Machine\ Learning.md
 argglobal
 %argdel
-edit ~/Documents/Dane\'s\ Vault/5\ Thesis/4\ Meetings/DGC_DAS_328.md
+edit 5\ Resources/Literature_Reviews/A\ Review\ of\ Formal\ Methods\ applied\ to\ Machine\ Learning.md
 argglobal
+balt 4\ Meetings/DGC_DAS_328.md
 setlocal fdm=manual
 setlocal fde=0
 setlocal fmr={{{,}}}
@@ -28,12 +30,12 @@ setlocal fdn=20
 setlocal fen
 silent! normal! zE
 let &fdl = &fdl
-let s:l = 16 - ((15 * winheight(0) + 30) / 60)
+let s:l = 11 - ((10 * winheight(0) + 27) / 55)
 if s:l < 1 | let s:l = 1 | endif
 keepjumps exe s:l
 normal! zt
-keepjumps 16
-normal! 080|
+keepjumps 11
+normal! 058|
 tabnext 1
 if exists('s:wipebuf') && len(win_findbuf(s:wipebuf)) == 0 && getbufvar(s:wipebuf, '&buftype') isnot# 'terminal'
 silent exe 'bwipe ' . s:wipebuf
@@ -11,4 +11,23 @@ This review seems to talk a good bit about that.
than other types of programming. Things such as computer vision and decision
making are very difficult to program manually relative to their ML counterparts.

- Abstract interpretation can connect different formal methods that otherwise
  would seem disjoint, according to these authors.

- They spend a lot of pages (~15) on *formal methods for neural networks*.
  There are two kinds.

- **Complete Formal Methods** are sound and complete, and can provide
  counterexamples where appropriate. That being said, they are not usually
  sound on floating point arithmetic and often neglect rounding errors.
  (A toy SMT sketch follows this list.)

- **Incomplete Formal Methods** are generally able to scale to larger
  neural networks, and are often sound, but generally suffer from false
  positives. (A toy interval-bound sketch follows this list.)

- In comparison, they spend 2 pages on support vector machines and decision
  trees :( What did they ever do to you, man!

- They also mention that formal methods for data preparation and training are
  very much works in progress.
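
To make the *complete* side of that split concrete, here is a toy sketch of my own (not from the review): encode a tiny ReLU network exactly as SMT constraints with z3's Python bindings and ask whether an output bound can be violated. A `sat` answer comes with a concrete counterexample; the catch, as noted above, is that the reasoning is over exact reals rather than floating point. The network, weights, and bound are made up purely for illustration.

```python
# Toy "complete" verification query (illustrative only, not the review's example):
# encode y = ReLU(2x+1) - ReLU(-x+3) exactly in z3 and ask if y can exceed 2
# for x in [-1, 1]. sat => concrete counterexample; unsat => property proven
# (over the reals, which is why float rounding can still slip through at runtime).
from z3 import Real, If, Solver, sat

x = Real("x")
h1 = If(2 * x + 1 >= 0, 2 * x + 1, 0)    # first hidden neuron, ReLU(2x + 1)
h2 = If(-x + 3 >= 0, -x + 3, 0)          # second hidden neuron, ReLU(-x + 3)
y = h1 - h2                              # output neuron

s = Solver()
s.add(x >= -1, x <= 1)                   # input region
s.add(y > 2)                             # negation of the property "y <= 2"

if s.check() == sat:
    print("counterexample:", s.model())  # a violating input, if one exists
else:
    print("property y <= 2 holds on [-1, 1]")
```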
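And for the *incomplete* side, a minimal interval-bound-propagation sketch (again mine, with made-up weights): push an input box through affine layers and ReLUs using interval arithmetic, which is also the simplest abstract-interpretation domain. The bounds are sound but loose, so a safe network can still fail the check, which is exactly the false-positive trade-off above.

```python
# Toy incomplete method: interval bound propagation through a 2-2-1 ReLU net.
# Illustrative only; the weights and the input box are made up.
import numpy as np

def affine_bounds(lo, hi, W, b):
    """Elementwise bounds of W @ x + b over the box x in [lo, hi]."""
    W_pos, W_neg = np.maximum(W, 0), np.minimum(W, 0)
    return W_pos @ lo + W_neg @ hi + b, W_pos @ hi + W_neg @ lo + b

def relu_bounds(lo, hi):
    return np.maximum(lo, 0), np.maximum(hi, 0)

W1, b1 = np.array([[1.0, -2.0], [0.5, 1.0]]), np.array([0.0, -1.0])
W2, b2 = np.array([[1.0, 1.0]]), np.array([0.5])

lo, hi = np.array([-1.0, -1.0]), np.array([1.0, 1.0])   # input box
lo, hi = relu_bounds(*affine_bounds(lo, hi, W1, b1))    # hidden layer
lo, hi = affine_bounds(lo, hi, W2, b2)                  # output layer
print("output bounds:", lo, hi)
# If hi is below the safety threshold the property is proven; if not, nothing
# is concluded -- that inconclusive case is the "false positive" failure mode.
```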